[Binary artifact, not recoverable as text: a tar archive of the Zuul CI output directory `var/home/core/zuul-output/` (owner core:core) containing `logs/kubelet.log.gz`, a gzip-compressed kubelet log. The compressed payload is binary data and carries no readable document content.]
xa0+;j5s@kpT.)8-jPɎ%LR-aZ.|zWo%hף196 -ds2[ mʪMN˘Jh$L Z24i% 2RRY4-겏ikrJ9VV)SGЃFL.1ٜLk9W3TmdfvdTj#cW,X( -*6(~p!,Og;qCOO'8bg#RbƲ2p\Z2):ZsVkd2RNJg몈-PX(٣8IJm(Y_ AC2%)c'9b`VFjlG0s0\FPPjwQPJ 췂ĥ(ﭰ*o6Y43ga.U1XŁ2d 9PZ%š,j_Q)Di-_9OW5gǮ*#jQ"InY0`JDJͮgNQdZƌAd Em g9cƨ@r2# lF͍D[ĩ#K[eDf ^u \uKZgQ+.V>)uņF,ErlYVmH JAF0KɊ4 ɜk]2{k 5W9s ><~~QFA>U?V#eq)<[Put?'T^c/_8pa.|8>r+NƳe;w?~X ."s_?ˬr֮bJn*vqN~[$ʯFp9W_iYHgB Cv{3]E'_w˟VG(zlXs>-^ht<[{џi8y`e{zZ>|78\LM-' e RT/Na?F- "E<:;u!O*!Ze*Ű+\ePeDe6).(TW 1cR2ǵ1<*-rdZR n+Ņuv;ɀ Y(Yǝ6f?hdP͜>!tw T-vgq(톊RWܯF%z7$"C~ h+e 5xLS̒ K3 Q. C쀁ТE!|hy0pP)~ 9Ya(dE"BcNU6 :f:*B*DPY)܆>d3N m Y~V3|:|4sOO.FaevvzV{g1ub~gkF0 z5mAf 샥#4f â bp}!gc GU6xqܣoѩw46*GV9=8d2ϛ-Yjc[=yE,_)E[zf9wk{1+F9|g<33 ʍaV)e<;$/\ύ~ %7~i3#(Ҥҹ$eimrRjcb1DK_ˡțIiHAgWqzx<{<>tg4OOLXf~zַR=}ϲD!ͳ<%W8obJ1rx2a/Pr7`h 4z1x oBre`:`$ v$=f& IQ< (t/gZ ;ǛY|lv:z#oxR22e=̳&!ȥ936g\*#[ЀLԠg#4֐GtS΃i+gD9$8l$ ''eUExZ1A{2Fgd9&< R )9ǂ %#h,ռfa}HN/-6SC vfwH7 ^xy&B7O8ac0eѳ(L Esssde*fh0^ >Ol g7uޱ?MHZ1*]O=T`<޳ r[Ƴ-m޾gO?7٤%1؊fC->\7F9R7[ `+zV&=vkH1F]uES E>%( BG[ ?E<`ॅ4 K@ w762O.ɺ'_/Jɵ(RW3=vZ3Ɣk<`7ƓxOzIoK^$翎~OK1\w})쏣<>Ci&''E[$i]>ui١u֕~u쯣I-R:|C ZtԱAv۰ c7#pmjלHoyySC羡|TR-*%ۋﵰRB,v PSj BMA)5 PSj BMA)5 ˘c-)]AvQ]b:D,K{D.K 5S*TXҔ3dɱ}9O<'z?i2 O RdR sXy3AѴ#^%@lPI?]oɑWr Q(F88&E?e)RKRW=|ćHc@$>h&tBlKULHY<\$/d$lOYaM"@*r< tZ|^ьiu ̽}iIxDP:!tB5V(%NNHF)"׭kuS'6ܒ ҁGJjTVO_Jp04S 4i~(jd%945RJ߀CP𝛲Zr tmu=>r [/3Y::t߼ĽsZ-2L;O(#Fl1@VGTo|`֦U-*ܷDۮܟ>%*0/Ҡ7B!vMm w]W?}7/ ˱e׵b=l+!#}Z`Hy_Q[| pG;e-l;u$e:jduJ\X]3k1UFF2C0J;\kfK鳔A*yA)0<{S(R\:n@P#1#5Ru`v#6X"W u^HTYQ*XZʋPՆOk.V7-=jX,_(ɷm{O&)Bp%6"UsK;1^=^EcdnnqRAp"j<# kd.{# W$~GB#zp"q3zodΙH6?1K+*]oNGϓ^|xSUGUݭ'1-V h{U~%?f?v߅~ E!*.4@f2  &$,6h4Z(T,Yӿ- ]HWdMκ508K|lp:[R?pr8hrKjۀ_%*m[V`LUBE&_ӏ$zSq^Vo^2J bcg %hmrڅ?Ź(Gǿ r}nQ.z(^^ɖv Nnm;msVKLz$4c;[w&;roW"}#[:mf4n$=RtRIT mW{Ţgv6741tD*zRࢿv@(i{`a3e~DMvz2߿鵍j?,)۪Z-M)V=^ZF|Nc J}pr=I8RrZmkV%#ȧđKM.=VOVK^U63S@r diYR%3Uy8{}--KGY\jûE/Z3@\rt9v"C)EG3Bdo0Q󣽫p xG>Co~=F oz$bW <{ߞy`jRݸq}2i@>UQFXKc΋eu`-w >h|g6>Y /rb9F8Hxd1d-Fc)sVFpR).T.Ht)/3+C*h Yxo0v1k6ܺzw6>1$<%-#m;.R;R8XڿM<ظ-0ym!5 00mqQq[mq-&2防"E) @vJsQq$ &:/UUIɥUYo9=pcpt,F4ޡt6~pܯ2ͯ=}m6t:5po2 VqdS˖qgKϼ^MkH.ҝk:q1߶l\Bi 5 ~B.o;&LϮHoټCϹsn/mz\D+/]F͏&U-%AޯmqUn:0SSoiOݷyf&}Yb+qӵAreojY/Gn]l~K߁bxOߜOl7<zS MnWUexux])fn"U~9^ KЎ\b|Zƌ `(Da ])*AGXC$/\O\*ӇAlՅ0G aҼ~e9XAĹfЃsW4` SY2fSv#p^v'/cfF:Nۨ$]*DB9c8O.&hk /Ll0^$% .Yt_!HFrjn\Ul -rVdEoҞvBm;rZyS|bD8-$" .!$b@@+@LB$O$.i3Sj۬ïF;={+9#I1.AaZ}dWhƭO"lgU̱Rhd9q 0AdR l)O9pԳV=:oT:|2` X!EifD,E$>IKdT ܼ=-DSbN F1m8jΙAE[&"U>[YLPALh3 0"RDCKd 2N=-œvTjVXoB4)-.{Uҍ%(-((󂄮8&q-ybyU)JT5Alsדּ^Ʒ4yȦi5QiY'b]D8BײrEwMNAc2lҊ ۤ/dNcI#HD]KN, %\1K0: c-idlT@k(#HxU4yHGs@È t\iEoEIr*0H"JzcЪ *(i'є se jL~J-QJ:SulMr)XAn %}6BEQz Ogv[EygdΞM@m(|ʎVg eŮtKv͑)a?#QvQ3%k"<~U @`xB @xl<%ЁCz}lϻ%xz:(2Zv28Izt΄q$52?TJKs_uQ|^z,>J9ݨS&t^q_'ԦXzߦInm}w=O`Cζ͝.JÚ]Slv|OZt_axrJۜB2SQفfӶ;SZD^l؋!H/õ":A:0=0KSpj:?FV#a'#΂n ;8FR{ wCio"'L I@K |O V61-P_*6C7S/^5S+{It@\;%$zB,Ec1Ţ0G $`Yj5i@ƐT.͙DɉC§TZW7PYQ g%jFE:)T\VGV\.bר<zB~QKU5N[)$'D&z-SZICVFJ;AkQ/&ǘgeN[LO 6XebF/K Τs.m geUjX[mlAm 9Ӄ{Raä?</ƓWnޕ6r$ٿB q˶{3ƴvw`ֶI*^I(UjEVFDDi\ʜI]KB<.9u6%)q#Z#zQ'a ѵ,fv~%8 A1TBߎ 00L>&$SanLۍalM⺦- =j vkg%~3C@@呡kjEvR%&}A(tCFfHBϣB x"+7b~`]H"mjm:gc0"6&q"NC\qi1-9.F$x4l&4$l.X`sf#WɸOYaD)p5sSq(6\' ULCF[nFcsXptvXj֕( 퇵Tڰ#j/ gp^OZ}ؒ}m%\AWN=)M Vv@WYZ!WYJ){zpH0%v9 nIEk2^r~KǷ!:Մ1t_D\ ~KѴ5hߡpE!hM,}xѣ ~zs[v>ap]q (`qb> Fø1UMdz'DU :wu2C1ݦ=Lͣ~`(q9SN(vMْ Ad{~LoudLm0QinrKӏ&(AsEbx0J@fVD#WQ0'V:uq:Ɓ(E1,B1<g22Eas1-,M9ji"SRhN&mY%Qh$ `Ȩ>1*Qʞ\M88wX# cC*r>VrPǨ6vhk! 
,p"e[ YJn#K;(03p]2UVUR^ \@ \u WYZWYJzzp%LtP`FHg*KyW*K[,&K =zp%%!;W(,̱Vֳ,%K+ќde:8(w?a4JF%ꨣo>G?/8.CfZEQ ͪ߆7ǿ] 娤jלŃtH-O<"7zö́KUr(di׹ߧ8 '^j0rt#.~6 @ Gt-È"\J}R.QQ- Jв]Z[y?/ꨨ_Fٮ7ʡӬJnBf# IRQ J+Bo|9xS!ǒ]f1ZKƶjkL)K bv;9!Iե0Xgqig6²`Nղ!leqMgE_~_B#e} =Nc +hVJBPfs6+r8G̬0_ϣ_濾-N(T`wtf@إۨVzoi+?٩+ Bd:Jτ1;QR\ ^9IۅcG PX@UlRug^_d:K;m6{?=LSHsZ+aqwkP`n`-X$u%@/_H)I&.LqjITgRM'TLL( 8)W'yf NJT gsJ9JÖk\<!&Y]D ygLD1VمX V\~aմMS_oѧxYD@T"\T|T^ER}3ν 5_o§ۆK͋j]5QJӭYF:u;-}mĞT𥝋)Uk;zA!2uz @*պ pQ:-Rscҡz~E`;ԳǶ:R`]r .$+&= 6DIyt\<*'8g*2{P&UP+l׭o7 \>IWFnusORc\I4WeqqљؑԚӳiVIaoq\⸀ۡx% JXFLk^%pֱ$'LOD=%Z~гP=kxAhJ*ŝj#(pi 'ɢ4J#eZ(D\8scc I$+ABUxFg>Z8Ԧ,_\b1\9|&[ `<{_yy|dWvԀ|L :爭r璥yM$-]@/RFgFy& wFYIx"F(Eb9MM.xϧDB0*QYm> 3yto!ד?)Ў~tiW&Y[[d?Yp3ޡeUxGwӲMCZ_YȭMF}f;s>e]?]Twoos8y=۩>exgw}ҝa<miovus3;ZI~JGүg c'I]5gݟݺ&^$,yNgc=ǩ>P]R/)UݵrnwfG+kBYॣ?\aw|sJCgy3gՎX$G˛CVOS0$@ÇgGa-:Pʑ=$7|V]Gly/=,/φiYi3orl9\xI+'J 4[nr^1ƈd32O.%] {>XZg*o3%#SJAi 6 ъR-B\d n_{, :ڻK9zc]2]ԟ: 8_Qz4%%터Ru t 7,72 ^O/Cks-M|#oxqQ(#U"8cdy䭖:DhY99Hi.UTpC?:nQV TGf8۪(kso7:8 {$Z{a<"jyDe^8!ѓșQ`R7OHZe]gщDpd] Ї2ۆl;Eѡ[?C[o#[o$saݛrz_ + ]9͙Ri-b23ϦLY}TTJ#ὼn9Y@ N8S$&wtE4RHl PR"w <XC#w&@h)!'JEX$܇vp` :ȒGd߯ؒ۱dYSdؿ$k|= WFtb,eUy- ViF$X΂diKZ݇LYQP\̺?K(b NZT7llݍ8j>]יCaj'0ݮS"i85eT}zuWUV)*ɺKJ~#3IxM# Ix'@R6EFJ7Y'G6 ɖDB/ R8#c; iơXPXxT,S2-;ܫ{V'^~ZuOӳGbdJ`,PPg 2;QDXK$JQzGIS=m#eUA~ c][%+lV0&#]2};`<2:*1&SuVK\̡v3 j#ݙ~%ddʺ~+RNBy%۽6^l@Hj^v.57P QeX2DY 1?!6xLw71gPDԍQ8".7lX@,L}#SJBIPcv=8<@F v[Y@7pP _q&e)0*ٓh7c pk#p]W6CZl%btME#.7ٰRdElC(QꘅdUd xd31pq0s+8mc<y|>u Ѯ,Es d:DafO5 A?%Yi4fkQ$]RF1`z YVBQ[Jx(J(Y6J)Tt$=&&`5kL?gEX?~z#gZ/on]LJНc9}Mv@˯Y/@Ud2a@*%J0 @4` Zh]`sL7.(;!9/|Vmv:z]a$e- cԑg*Z4q ~g;^e۞ͱWk_+abݯ=,IQ=߷iڿ#{kG <.P^ShkvPЦ(\G~݌3Zn짞OyqnK~MϠI3o"&sJR% ]\6. ԾFF1D_+a7ҐH71= {/Y/.7/ޟ:੯@bvǯX85"ZmUZ/bzv:1-3ΛX! ACl1pD<`<7!9 Ax1 Vv*))kf(Rq` M+r%cu_u; ƜţuG3qYHaD%gu||Gw>c0ڷKGvz̮MrLA[:[D1 ZfBy- R V "a63_\K#lyN ]+HB;#-Y+J I P 6YŎ#<7vuUOoo:p0rzEJ"$CJK`zE?MT(jfVya}HFbޥ[lv_т8T2PO"E=|b}O_jՀ'=\a9P$m)%slp PEpյ4.!)9d8p@:P'Nnf%Ĥ[CIKVZcAćS)喝"i>D ɯsshot_'e1?ilНOfe8ϋIKꀂj݉KֺC@Oa]5>=  Oӫ%*E)YӳwW8?'uNhӓH'Wq~w`|Nx#։aRj[]թBx}~'Ne[sJrJmrvz{'m܎}PL:\{%AHޠZ N@r9Sʒ& ˁEۋi `+aapQ4ꌵRhbs[Lh,$H/@?^Ыi7Btvqg)h UCEs9J5L12-8E37/%?⳸8]|4_v-@|:T;O{ Q"1 4佳Fo󔞜"||GnyJcٶ YBD,Elxs&"b)9Ԏ{lJ0cw3]8M_sx c|ȓZ/4.&|^ ?rOL~rWz7?̶*C@iBj=vO|`sA _@m:]4&w7Fn{BhWeCN#z!Ӧ9QlyU}GujQu宦vʐ*-c L:C,"Q!R.IP^+sFmmB+^$V{s$ttQ쨈 Nޅu ,y䴄~Og/&6gT@Y ou?Z:{$ոlj]_{ TaA57$f/Wu򆯮#Y?WY%F"ֿ8_ᗫ`˵wܿIS~F/*(jӳvR]ٺRCay*c!ĶTsߞ}Ê~5e4 !C>.q~j!mÛ;-~y~o롛*J6G4N$9&&ZP* +O m>&PJ[+|J1E"E6%-y%{vgSllђ,l3v5Ub=jq+D&(`@ jcHyY +rD6 :))颴VAb1IKe%ȰѤd0lKSlV(1 jŢ6f̊%k(ˤ>,|HPN:VūB1I@lerojF:c!%D$ ARmsdA)I1B*o$,sG;"XgNJ7}jnv ݁ˊt̮Yk6K0Btr f$Lkgq uNͽvͱPGc"BG(1.%XyP: "e4.bYXC}6Fix\cFq (P,J0u#BB Fvd#AEwpm:hjK8G{$6O^ַYw1_ b!~bš'/f$_TV!eU JcV*?#c_KB tSR1[R֒EjA}PZʱ!YC7_6kvR>6Cwƻ W.dnaAv1l&..q ZE ڊT YP9QH^gg=9&8KS)x C0{_XP SD3Uˏ>?e(rs8|6*} 7,Ǧh- vhYeu/z[uRQ€B5Ȣ9X+DoT>l|FbþYo29MvsqrR8eH1IBRXS|v>bM`X$"E v!V) Hm}vC.q66 Jv I njB|p1(1BϤ3 (1 na !@Ey%oɠIX $)/vP:]wĘ>[<%C6P!I ONL $z,8 Fe6FkR'TR$+F3ͯ>QQ7c]J͓@ZDY %%vHF`d"͆:0FG(5JA$O!uz(WosdQbRYFʄbIjGjE-XF0s qQAv9-n v`n6h;|a'9^BW.z8h;A&+6g6*hR[OETl, _R*q8}9Hr ˟# 6(>`hĘtQ,ko,wn M湜x5kO|51?7&E~lXgI>s:['OO<\i9lmuEY\mwQN-W{ퟦrv6~u`V7| t ^0)yڶhZqSotm8<_̾G fzѝ{nwFW>k3FDӈ}3, 'gDv+ ۧ qז5v~mC}Iw3Pu@n<`?հ̞f;ELJ ?˃Z`ӟܿ0J: S⳸XMij~K^ @I(ZSƇTZ iȏ!5 jG]nMgB,)(e@ BY4eǰF>YIM0c>ғ;a=n'+VSϪH_?䊯 W)pdrnV }ߧ_~M?OgN{WMiBغ/ |Y_k`mz#vY6WَHkHewDZk38j ӳV} !ʻ1Ԕ`\,6ZPq7ZEeBL(dkEFH0' 9+':H Z/|*޳42&O1 H_&(1{[$)mR IzDNVK*a+AYnZ{E7z,ڴ.<;Ō pvZ05W fīWj]d?dc*hu 5ټ \[vq ?|X6KF9,~.<5T?ly;881w^Eӏ|VRr_=Jfc+拖2+4Sf,+&gC -2~q|uYRs9ww]c97nEZI~Aw:b&_EH_MWX}e{ﹺOK=?`8Y0]C}ImF3k%;88:ߐz9}awնfjB]{ԟl^?*#bDX%6lbK|zHN=vx캽hw27i/bUAY0ǜ K2RrdqU06#' |RIF 
l9O1IQEFFN֨&}"g;J]˶qrns Z)9czuWy[E>|D9;7FPl*IM"[x^ڑQ`/ (>aJ dΥH5Є> HZ\Yw )9/K"29;eBSS攓ZA'%%]*9HV,9 c$V; )p M~ 7~B]1\X4,Yd tr IBǪxS(&Iț t\TWh_x q0ud!H}LX\1(e7)s @Zyxlcu+{߼M;C[34_rr;e "4W2S'5QSX5Ej3DDL YGzr-:"c5:3n/TeR5`je?$8y-^ cPa`MvZK=Kσ7>:L h~GQB(Ma]Kʂkc^ 7F +C=tD)eHm(\u_2ʵ8 A\V/^.zQR[AͲ[GHqDG]^h9K7p>5DəF!FM6 ?Bg@DzHfC"pH J}vk>'˿/t>D16壋E6VdࣰF#9P16u{:bl(g:[)A*.*Dxek.ҦA9lMQ(PԔbK,U)yW.IBP[l89;Fp-e;,_ۭÞHeϵF]~֓znR׶X0֎Yaf i݇]^cvҭ]ewJ]s*+ve2'67%\W7˫t[|coܮiz|{>W#]y=g nحlZv -Դu)t-NzCP\]Oqxc7wCtqd;Pum_68ej3h)-~FVO(EK&-GaTM*V7=@㚻4W{񳇪&VtnoBkűVڭ́%w![ٕӋb(`u5+9dX+ku*^2Q*5jo9:ɀ6qIƗ󑯠ÛJ;,h¼ CE+`LJy3E+{J%7XD_Jo>L!BVUw6&0ƱȿHK9RAyoA{sm5H#6`ٚtͺ|E~ja5Hk9pS]ZӉqyg=>;zaZn1}Q((cP| 5X=W $TP8ѷ$R;F] LU1p?=c k`cg 5"T5@= ѹyӺDy=ʺ[!6u*TX} Kc>_(TRSjˑM3)9:L* +Jٜ^""F'z("="=HbreʉK>tN!r6xZIhd!it|:@jϟLϰt|z|@9(B->FPUs)Tp1PKlS?N?_>(wh{AgTO\oFe mp^ bzBjƤ?)#ZRP lJ045N!S~Ă,hr eVjk5VߒEb0%[ubEL5C R@Tma7P|;߼P|{>ՈI]G6 G2o`ƿe5QHG#:dml ^МwI<:otFzE4Fk8*a5|$;()  :ݚb1KbDžrwĽ97Cǩ>o;t~}qP z^ul A="LܘZ{Rds jշb2 RRI5zˁf;A}9aʀ;A9dS4F#x^Z4s5bupsl@N7++A>ʑBC( ` X3qJWmU4 YL$b?rM+5lsE}'<0 籫՝/ۤ4<1:O)L);i_ ΨVyMD[mR߬D ELdr!wL>(I&;uB΍jC+M̥ujzj>ŕT )B Y155Rc[Ѻef GBd j oP}_|x?yU)l0NobJ1MIlICKc Zc&)RRD㖑bh{ hh  a .43jOƊZM]Fj;UckrSc0+ĩytdzn2+ 19qMuw["5jl\=PvFJ2TF`1-h"qS-j69<.|\h]762%b;VkO$t|ipj+dn$5Lvqv`"籤GOhރ ߫S5K_(Dͪ+lyɓ%%0u;!qJB-.N&RHHi"eްiXH5QBL@٦"}*!f\Lb {e 2qBR SxW1Zģ`@,m{1tl6rWdW>/M{a/|Xrwwb,1xz򪡒J41G | [U2S%nx3ih;.f4xҳEF%%Du `0XZi%Pgɴh r==E5hrv3n\kP/L0֝-7ݍAˇBX,AESSxh$РW[ǨO3%G ڤcoEWz ;K ֛DadI2{˔Ŕb~VOPk4/N8in12uT':Q:yAxg8"d}5W9)7Apb8z4x#כ!uUSԏ"ˎTe[2c(F1W*V>/+.i-s[yuh"Ty06,1'(&FHB#f}1KhRbFeRi$G֎5.~wǚdm2, 뜜7PIb$z:!aJ6ydg]i_c맳?vGtϝBn?[okzŢgd@{֟2Պ.lot2]dl|aͻUr~ :J{U'5-^KCWMo&lnzߖ/[u+su6ո>~Z~w |1[ϋ:[&w.Η>`O?ΖD_mgMLv9￝4Pz^Al{+fc'ȁL]H"==0y^.'̩1gbٴTTc3c?XlZo?}}J1\,j>"g_ڬ_N˷* K_xn.Tq_YKDe0~σY=JRs:2N#@/>+{'mNX0g܆cu[^и{5mn6xʗ][7jw4w@LJzUzT ;,fv3\Nr,,2`!3ktN L$o3MK8[3Vl54|-蜦s}Jݒ,oS~d0Aly6Vn"~ZTOVia 6KgtIlm6(t'R:_ .P2TJuIX,;f%go"Eo'WQ^W:ּn #0 R' Z`,C 1,] (bR`:Ĭ!TNzE5u|lo \4qكd]T]çh(J5d;~ F0 pwNĹmH+Pq7Y=Wb jS6l.K^BZ8?'yxUIbw!mt:zt ?_tDK=2F͸HcA4)` z BzHqd{(04(I-4Una!T[Lɍ\X!Sbܸ'L{|S-y/Ǧj4jٙlFf͉͍Q?VwD!A{beCw\%tu`_^y }^X_Qow&w൤ކpi4ޯ0,h\rt>BTf]!7;P/T `̆S[lOaSyppUvQɛc55GxHdIŐ%H>ġ5CI婒ؕ@:~e׃x^v={rz具xWk.SuGпt󊂶bl}uey}jA[Kl^_|ᛯ-|p~vvW?}5md~ڧ7sunJ^=/W=G/Es=um}U\[=|vO7UbMK/|t''oqne#+w|`z*-t|8gt=-r :B]@߰6,%D@GͺH.Z.ᔚ>)ҿzՏ; `$@Ǫܙw"Md9t: MWRTS_l}`RZ7:&+ME1l #5LH?߹&i.״"&}''Pb YEyE);SԫBd_ĨD`RgWIb^LWL91qa}S?@o1[!I@1;'I[O}Y^Ke:o: I{Ɂ sQBZ 53|b25 .jm h2F?{WFO/"9>, /,R}8/WlIX,Sqi5fY"OxzD;5RnX|#6b@:`rEBR(`cڨ*hPYRdJ1+H, I ,cT RԔ&I ̸S`,sM$Ny鳵 FirҪ!h֜icb>ݑf[g]}CAY?7_OP}U[G\-Ϧm՝dIG%JN4ҁ+5x۳U&|1c:}.9o]\~~jKEY[i\cnᷣDE9c!vtv52::/#uց|h4}p$5ޱfR"2w:/IG!t)̉h1ƭq/ vMsҵqu[Yw׮%Ǻۮrf=9Z+jj,;&BYHr``}`%cdDHrvH>w*C A!'AsFza"XlhS%#F,R2Z m!馹 ig|>&\1̃wHH Fh6"Y!d;9Thd}8ExOdj#UNLA%; h@bm¥وZ5~CE/δ7UltUEpDۉ>\YJ4E69:_D&dmE0(Rh5 =RM-9E]FrR1Pr*ʮ2JAJ0HFf͹qfXld싅 kڹ+3޲ý23]NŪuRE_B$f'e)Zh `4RNn ٵժfu~"Bg[ط' <ǒ0rHj| j7CAmQ{dwK]b HYI;L!EEaXZ6pQ):#=df-L,cM&Pcb.&m:G6xج9aob*0}4FD3"∈wxV،HR1*aɠPԘ ad9!RQ ,wɀ:[1\L RbOZJH51"6k'į:2..7y"l싋kjqq;}#2NI@. I$;ls*>$F`B@vC`'eUHuH(|}R N$ʌ(M[*Q9-1 E[h/ 9*gB;ddVFY )e KDGZs^yRVx)m4!Te f2^ m֜CsFJrZ[b=Bو<~ܲ]'fodR [h YY9Ҏ#2@Kx) ; [\APF%sV0E=Ѻj5Z|:ItCvx:Ac"'I|F( W(&!A8ǠTz\e;1 !8TI!1A;[pJY0 +c#/UYƵ̖+"ܶ/ fCd_kghhDaE1E&Lpsg=,D,kʣy3K26FȾ[A/Ět{0"&&dDC$&Ol"s2ؤ~M_3'VW4_6v+ؿW_Kj_ʟ5,{_+Nӣ>]}S%-=haǵa[|L+? El}PhnY$Rb52#6^fyup %"m.z=d{՜sɗ^'_੣b<_WU?67ؼ*菿s~1[_߯N ބޜ/c@^Ox4N>zԚqy1Yxz%,W̳Q?TR0'~ JN$AcTd}M&e$dՀd dh-T `J%CRI &v?M$ nkcp>PŘ (P$%u;RQ1Z,ZKD!͚s']C!]׺ϱŃ/uu֧&Ϟx3>zI5='AXŧC:)fy2_эs`x֝Ok|QdGԁEK<#BB=@D? 
TSH5ZR:KŔ\sa ^e"IhӪ`2$mIsv CBz TUEi.3IYgB!u^b݈n-wm=tdUJonaq-%FtCK4\Jĺ_l,U8pr M-8yiJt1x$/%zd 7) #IWP*eFUh۬9[<b>?/۶8/=[ǻSo+FkAGc~_X;C/$>sJ|ͫjVGfy*zwYN=-l{Zxڿr?7QƜYB/o*ׇC-l~n`[jj]j7x[J'ߤm5:S:|KN=wpn_F!Ç>Tn}y[E^V"eV}E+C3kՓr%Z݃U}Wܽ/ܦwt̒iɁvΤwh])}\lep1ڠu7i/u#l֥݌w]caV z: 5^߰cǹ6JJȺ6Si9=>{x۸UӶa' y=w)0ńE4F^b21qEr`8"pmC5Jgjr V *l r)bNdIzEa&WG`*ݠ4+Wӕ?B!/MˍݥJo;Wt}^9#iccC.][H ǐo9Q7wAu ]~kݧ\>gۂ`!hLbvA2m 2E3 uN S;Ctq+|tmoDo>Mn c|IM7|8 zz2m7'm~n>{;m_m=!wgU۳6%OXtј]ҷuruD :َL뉘5gZ=1.VZ)t^.`(O]=ہ̫1vZĒ5: yw*"F˄!L>GT ,Ѣ4FkU0F Sƹgj{8_ٷ[?:^npYߗa̘" ߯zf(R"r$$Hp=4pgB2H{$KM Z{-vm@7hd"2q[.VA!ylsgPVpz9M sS'!e㺐ۥO]ƒ ևo03#W^ǪId Rq؏&-H4HJ АUtRdQtM"'f4cd}CYFC摘4X E)0- 1F8\sUz9}+~n.VG]Q+ ^ bȈ V50DKO5% %9]9)cʡ"K*HR[:h^[]Iwrй'A1Eia8~ t4hac>rmTϜQUv0 =F2f$=oWcW5k.΅onjs롷o*u6ɐq@*Xܑ2+ /I(< HE)j9m3ʪ'yBTtirT'\ V4&CcI3bʝr}K 1hpO7,'%.$Q[Je4"x>#VA cȹ{j=o]{ _Fƚ`P{4^ `%^dEF[C^ JM݋{(w]!`U+UFUFYGW/zkx5䇗AUhDE+WV J%;UT[׹h> z]SΌrL0VrL=Ҥ Fh[nl=,r a۠z?n<UElc~ E zkw?b -@mXz;q(C>!畴}Fuikqxq|>Z*iiITZW+~N ^{U3Y3fT3@Rÿo)JkfWaF|?8O^kXгӟh-(M]jmiZhklf[iy=&S-v,Z>YjBagSP9"ʀxBWV{%]=R[q6O\OZDP=JutOYٗh*eX*OW]@T$TœR,hkDX.΋w Y UHIФ%s΁ØopZp6̿CJ4)=s))(MgF M#\Xh:i:dHӌɪDU ]eL:]e*_"]qN<`2\v41;DkUFDGW/Đ#+L)9p ]eOU~(7]DӅ=ʮ5OX I9h&g}Zn%-״CiRPq NxYCѼ, =J^D",W?^/t7uTr6opR,~{'A(QGp,jk#$kWO^:IpVi3ry͹p8yY[x\4η7' =7'?ةŏ??=NEMT^]kԷ zɀDNV%&I@ $y+Tk|gf! ^Q}3Ou0 J.rLQxϛb7-Յf9Ra~uwnl'>/L)LprRA`R4'ggÿ]R;qBF_^r^7>lMeS7 nOqJ J-- 1RGaJVԃ@a;ܩ6<D )"M ǽwP(NR$(H"HgVKcBʘqh(cHsGj2wn'qeVWz> OBWP\W=.&? ímF3XaªWV#j;e+F4\n57I|+.U\sy@u9j cr&bj94whi~u:S}w{dhu#P\wCj)p<0k&fD 5ݛG c޼t&kOf@͍ z!_k&kx_׍0SfqZL"GC͢!-;FbD.ؾkkg] Fh"Z*{ " `'BPG]`bt^0-W2D*y+{K/ɳL^<y!|Y$+΍KT@3jR%$+=lGz^@.I^d"ƍ+ly]Ftro QE].n9Jr@&ﲥ6fu)}]XzPtɣ$>ʙ7D)Cs @\1u8Ew²2Sf fJ`ud6Y Bc&D4P4_T59-iNbU `siEPLQ'AZe2)Ĩ5q:h. Z(iJZ[p6m*-LSHz4Tu>*\7B\~D^W /]WF@I#h{D eV(^9</ۍet "9_R+8sx,1s M ʐ ޲gJ&H))\!q!.D -E&i*r("MAc A9Ul~&ZPYW8O:MUH4*w14D :#6*aY*(s'1u!ٻ6r\*y! voj/tRqdIf%ٱ' eIWj%A0[SbE/ |-YcF5%oYHn"[~Y&lOmB[= %4OTF0<xP|[|7,|T.n&7"髚')L|VK߾;%=``>w黧(H:dΜ!w ؓ]G$&r@dm1ȕIbl X"jg T `ijlcx#ѧ)_"}>~/x^ݗxwu;:,wO{&/~W?/ ۱}/mkA7¿wOkWoYݐ\^ }t]jnt3:҂mVO ŇۻWd%O|\G L}(&0J 3E LLwt()͖T(h`He-dy,r-%F^u|ԲK*֗T,I Ͳ_s}b(d!ygh7-gKY}Mi㷷j~'߶vE_Xk/>0_IǍWOW!|ր3t\k[:st}a&_o5]_LS! MQٌ`yʆ&BolY7s}{=lnugwtǎwq7dpUA8lSYEgB*R)BTƑ*$3ONpx1M=uVor->~Eb6D`H#g0'&fF+""msGMb_uUp:$2Tu=nSm'Jdo]5}PЩbe`y[MwJT\assdIonހ +抻wl.{o]rJ6RR%&8?aXb,\܍R1!)e =XGԣ%ʓOxH5scW|o7x8&0#ĊZl# @aʎqN= ͞3J9?-t>ni%Y\lɾhSv{6:5ާ㣣w]|n|En&\ Q>cIarD0a6@RM)NMհi3ŲMDTc) ۇÖ=B?AKz1grTi|"ω8Ͽ]sj>׋ݗ.XBoD4y2Le s}λ tg}΢ )@6URu&bq!{[S%K2F,|sd&hkQkVmn^9#aٳ teqe+εwn"Wj*О w0Xk;-8 9Lu/ҝ,vuiu9Sn D )mEz=tƻ {n7gz3?:r<\]m9REo άGwxtzdѼCNf__}+eG0핲kУǑ(6VTLjͷ]xwt@c&*L HcU {eL|J5~Xoh]C=֚s+Xu-K KB䷲+c9Gȱjfml8.-9vXg$9XG(r*9~ْqq>s :~OslsscH MGrofjV_n25KWK}jV_%yj/85ȹG3Fe\rj:wWt_7az_q&^CK+FyW س<Tk%x~9& Ibc5l#k̩'5&kŎ.KmgOͽɲq  qu.:dmd9D?%.a`gT`b7/|CR)eΝ祿|D)~cM[[gW \KiX$P* A_,BlJf@ofO]bkTRb]@ FC  ZmXճlZd X^#k#3:׼{H$R,`rz6l9{R9'5=8CzyuMPhCT;%PJ*K YBG6HPaO,NRo4ބhSd*Pku|ʦVNrj Zr89?,>:&HHw.dŨ_ )̥͆cḕ?llRiݫēz楙z/g}z98oÜm!c 9HDzm0cF10{+Yek{cʜѵy#SD\o|4Xh|{GK% (-*g4lAPۯ5 -F4TcRg`sciv_1 mw%_nnC!y;&vƊ%r smijyR 5&׏OT҂BP۔9>ξLCAaz2 [q PΈeCnU987Lb@Zih"j%ThbJ1J˟f?,b30C`qL }ǧkLdVi#QO`M(zWox,OΘT]cP}HUNB2v)# Xvg+/$3}?A^SPA\ZUq9L,flЗDQ0ɪwO/<}>.sBV% O>Q}j\o Pr%%čkT]ea ﶒBq簦Y#/,QYFo,ԩhbQQh^QJΖ]1lTxW%HH;NLW)PE#C'"FoX!sf,C)UYLXί,Aμ>BBK Q@ M I}R<"%JMmgU1L&~CRo~/E]oΉwx4O/:}##L L .OIu6Sr[h\alUZ$eE `Vb.1`ev. 
d(LqZ1J3VgRBrɦ#(^A.r3l#[R 2_frv$7W;r0󆉸S5{ĽXlv鵧ݺȶ& OrfQK#|͒wPQ9+]&s}5ԆFLJo'^s|s7Wr,eW/MRy5.p1 g;)\l&ゎHM}to됁%c.-X0DR!MPs*VQnM1QJ RgnJ>ɢZ/Q?w_g[K+򮗟^2OLOM^>TGz6Y|\\}YlJsͻ|Oz,ZC OO2kNAeWoclމ'п߾ofo_noeUbc|S@Ȥƹ#_.Vv %3 U#nJ8嘲 rbHX\pMdpvMdΈ81k1|6.Ƴxn}I,8x\rPֱGg&(" Ɵ>aJ}U %Qh6d!17%ڊFJiWjhT[6 hՀ ~_-l'x Bgp`E=~\0j(ZnvC099r͏>|?sbjȁRE(5qdL))xꝱdS:wn\@ژMߠ DujzP!kKjJaR іqrv[qXe-&Bl aG+vgG rVo.w-v}[.o7Sr'-ZM$ӸuZҘXέ`[#EМB9 @PoC\7= ktƦQ{0 Њ4S4b[nW(~EE-ΪNؚ, q4Jh]0 E#dLt]S͍ݖHd 56`S B_vk2 Fma-^;"[}E5]uNaRr]]a]ZT'gŚ?{gFBr[R&`\H'>,VNz[p=O̵MK#Rr ["~U5]")R&WPIg x]FDBPh*]YSy zv?+lîNoE|r >Fr#i9ۏ7MdmُXXI} F=O&~'kS~&kSF6YOziMЮ#\`V==r\Zǎ+VÄĕR.8u?k)&ؿ~Y~Ly(Cߞ̾[&䲅 ΄91D}_}3YZЛHȹ?)) ?vLcq{szhX:t}ِqĮx{^a znz._oND |^G[` -ӂ-VئaRjX9oQaa\i\6-TИן5f=+J-$ZzQMJLm՝~4{g= .(kRCIqtOùpn"(?faF =.FY,:W4)CUFey^ESh0^Q]Z`'r_tY.-Zrc*ti/-h qEom+H;v\AzL:^Ju+J\U?7jW=Ɠ \Aynn+VN:\Y npjU;X%M!tO  nfZǎ+V9M:>H\`ԋNW,+j)ǎ+V9 qUW!r_ ?*4p;#>K!  W_\ Skt}J;* UpԡW płC?\+U4v\ʵ7Wʛ;cz%>f2("±͏]nzQa`iVk1*݄C4IaWqrWքU:=qe: \A.\γ=XCĕN` de7b+5ҎWR W+fm<`u2^]Yͅ#rx.^[Ƕ_W/M7?'jN)ytṊ))4s[tJXZ6 ~s4?_N =1w|os}~1/}yQ^j8kNݱ]ܞ7f DEߚof5?qr@-4IW !woN hB|ŠTx@.8G<6zF#z/z@ljΨcp^y5MY0׿=^cώ|RlO\mWr[Y㦭v)lPv}/4P릲dZ)U;T-8mBO"  XfV:ǎ+V׃+ȡY-q5LariϸvO{& T9=+9C[[ X W֌W W+BX3丹 Btı Am|~@/SLC`r^0jɎӬҺ iT#\`^*Ǝ+VĄ W,5\Z'Ǝ+V'\"L6pNnprkv֌dL:@\`  þw(t/bzrJ?=D\D W,w+kɮX *ضOp" Bk; ,7t3 jI_#^ ^$f 6rߗڇN՚=eWTڑ=|JMzKH+E7bzǎ+VńĕAW>\-zz!'CxWƛXn/ Jof;=;[Oa-jeV͛7+NlY}ޠ(ůג{w@WGCg1Y Ϳ|ijob:/o? =8^C r:XK~h!Z c쿝.-/c>gZ_~OџKKŗt>rWV;{OlٚY x7R/+`|!b^|YH>z2z?[68 @xO/g]댔 ' =sdOo~*SWPԟ^]Ot,/0b)Ԩ7VD#JҦ@VTeVfkQ,&3/]_$H>>Q\U- _g\Tj~z^wr)ZբBl T}KɉѪ8-%h&(AUkդHE It5D,B[2.TT.Z5D1?';7u4JF/wLC:jR& B 2`,DH%i6GBj)x)x]uZa N%c0$R%&u+2$뒲բkAPCU'oϑRm֌%)m]3BJᨨf*$K2)SS%*D > 0X#l! 83)e%9f3NYV2:)C)Fa]q_B-aiZ64Y!)!d*ҡ0hn5**RP0&? YShxѱ]ˡHʝ_ _B, v[Hx0bDhW7g1_6Yxc*lC'P2hOBU_x\&(AWeDM*1ڜITehZ@2{gP9Hл1Dk09GXGW}<_G H"nRPZ٥XaSRDJ.+L }T[2IeSqHJ6?I+Nb9kOeFM*\ liWB1KŠeXӞB6=VR]J q(VJDx.2ZI7dK.!$xH v+ YTdEn8gr-GΡmK us። ۩xd)bVMpWdA[47PtX,8bw`&"Gr 0eW!&S%sJ ̐b]DQlkUn#XCdҡIp\Ԃ3Ttg2B D.ʈq&N{%NOPSJ(HfQ]لPeU;ͺd@/k.6-!ix^>guRH/%i pPBiKiV6DO\;09xQ>Z6hDu΄ _˅^LKQD+fFc2D$% !0!ׄea;vߙxanyA9_֜S^! 
ՂYAӶdx=< MԂ'a5"C.mpd݆td᫒@!іr2 lÜ<"YYq(hbQB'؅J)DL+ȫ2 䓢Za0f0F_(^| ݗTDe#ՙ;7#m  GWĢs;dNUdJT٫`;#*)l+ɒ3.`$S`Ūaw.j#N*q>e[`I%x_bE^ a,zKb fsvH/vZ@m@%\uj ˅"Z45H)"pcJX,0NIHnsp1%$I(f8QYHHz l dWMb6NsF F,n0ZZ"/x("|QydI&.v"b};I,Bi9D A.H @QUPբWĢP~iI6kmH0mR/E`d4) }פ(:lӢ.c%K۩U[4Y$YkD/0 ‰;mZme_tyXI3G-Yg!MGhDf9Eh*5]gzi[' x" i ƿ:J` آ}A4J], mmkZ>zyۜ/:fMy^y3]͖\GI`0uQ=IFOB=H`= N",j((֔kM!J!W&Eqc8lQ2NG=JMw=uG!e(mWPj(ЭF{@A4D!Xw6f j{3LHЖCn3FVxЬ!"i!:M]\z ap3vE 3]pF= R GR/Zd/ozpr,1z@tMoP8v(%g; nUuN7 ֊Rx1,=ϧv~SZ$&OIY`7Y|} 21Vt^ڋdf֕z~}T%~ЛIc:aZ#r5=?ʟ |Ni.H~r/լqhX9|<7!oհ>uYO^ yXO5ZI%2tVɩUAx?JO)9K{&I{ɷQT/\+t5(b( WP'i؄٣l>}J] mA񘖚6e.j1I1>(aoϓ訽r^nzszlkLgsѧ(cD3=Sj!wE&5'Kݴ6ض#|Jˣ{7NBg HPdv|Qf̥Rf}u9LI~B3Z틡%hMG$b/6@a/>AZ/xU>Mz&{8mm&ZTckJD28ܫާ?K%!w5K/|T1)&oȔ.+c}j6JaٵϮ}vk]g>ٵϮ}vk]g>ٵϮ}vk]g>ٵϮ}vk]g>ٵϮ}vk]g>ٵϮ}vk]?]>V^+]{pu5 Z+G^Jؽ~w:a@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@b'; NujrhKu'0z'PNSt; N v@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@'Љ&'P8MWZ/Fʠ tN Z4 N v@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@:'Kz~KH[ ]\] ]Z7z%BP]誡++z$"ƎeLW'HW""uEpm5Z/NW2H+OkCkҊh+O'] ]qtE(y퓤 Ӯ""zY7=]J**pUTFj 2c+B72]NW#^,NW~ZS퇡Uf0zd_`ЕfԪ*z*+2kKY ]\k NW2z+[-"_tG{j9zuE(K/BW'EEtE׫jB ҕ:Y ]wj*`I] ]\S"asW@fb"tR&/ EWרZЎ_]JYLff F?: C>ԛPG+{]YOz"`o]5tEpm4c+ "0] ])ުˑ9PU#qJ7LGhu㳭#X{`kail-,Mh;K(OpQ$*ƚj pm~v~ʸS%Е2` C+C5tAV[ =zuc+'"|5tQZ8v"62] ]y%qQ Q+  c+Bi)UP˚o`_`PzVWHWQ/kRW8jj m LW_ ]zw&rCoxp܇ޟ0rlnw]9Ozݗ&JjoCW@?u- ҕ})eLcJQaڨvBg9 F*iD54= 4}屇8 M'vReMXP2>r!_ec1b޷FAm}fEIT.0ոA-wqo'ubز 0gI(ah`s͘VA,W~eZ狛Ğ>ӧOaÑY7B?T!$a-lBuvxg?Mʷ)_wWQԵ{nhG?-6Oy' $?Jw%Cw}{/|_3Cpl }|ԛamQ58:ג?]]ݹH|ﮮ,VCbyWc5͙~{۝{.|5ksaκ$b:>A!,[s+\ts(KzsK_5MyrUcX(MW^/eҜ-")* ]E1Nֶ1Л#JцܫBM0{}`8.^6 .^C7o0ޫ,VQy8#nZ po^}pba^r9~Q^Ёb0J_@4vyy#éd^s>_ :sN}Kh4~ѤY>^Q_*|},?M?Rcm++tUy~]֓ܐ!d{R|DF:_!1]bHCLQ N@/r|+_ν:6RXژ\*ܚhL(Jg(%?(䧊֨ԋt'myc˝W mε^zm9&FcOeT;( jڮ^w>j\/Ut*URE"Yf,?sPHWmD|b>܂x;Cf\~z3z#kbsv֬Z:}[E ډ[g+o{ۨYbD(kQ{pmhSt%JbL2mR(R$m }I{lv:SY#p~ r oUۿ8z|OxGL1D+8|2 "E=r6u8R kbiMPi3{8۵628OYHnm_ʲ]IL~/&׋a7.ZlQ i37oo*a_*YF:(Mh;V gXwm#I_i0_Evn1AOGYJ3WMR숒,S0_bYlW*L\j'IӇCe?ktN+fL ~^m?k|_Q@My8Y 9?\D#"fٱV9jtp<0&%0o$ qG[6àNJa3iϜZbdEp(\ dk4׏ŒM>e6j-,q+<ӞIL12荜݊a4f @\lvAH>-enlۧ)M lmLo4cZp}*q5e 0j'yS!(䬭7 \`^1ӏ͗z:UϠy^Q:YIȤ(cX>rB/+/qJ8yc\Q+!7oADʂ7Ћ3ZXB Eԓ@Qߞ&=>xvSЁj1 q/E*%Tђa2ʥXn;Cprk3e_nyw>&-XP8"'VnB9H[B,סEÍ!#-1ـ&tx]~G'),gM-.~[㖽+}?|v5`j9 R ]y6JEvOjtg 5S1UBCSz6O#>IU5уHqUfW |(Qr.?\^yic3mmuq.H,n.do]w]wkw4^寴=RM}Țn'M @i] e/mevPt5uK}foZU>mfѰfZiMBEnj^ P)<w/5U.0|S]5:Z[&ֶLfJKRgfo[2f}u^xƤ+k)4fQS4{W>TG'wDț$c3#Tn,3dhẳ6z6z4l11ǻl Zj0W~Lb<߲@}Wld4&'E-61z O[.x2_"FCʭ5 >;dPN^>:LJ.r UmJ>HadDbjsqfI%yC7x=v tN <+@hL֑5` @"XyB(AUţpIe@*Rhc(Ƚ7$4HgMt1\% V-4*XVhICdcJƀdӃA%y4ҥYU"g7T MnSռb>4eЇ#~dNc]xt)*^wBňwL_MeRVBrYgC$-\7ȅEIfT,P>i_ҬkpQ;r%2@ڲ-DYVz#gJCARHRP"cB41EBR%ȰH$r%^3'#1t5 B+5TH@j|`dc:y %"a2}>E0~]i)@ErU3FU4o\``+t̗*ԎEuD\%+cp35p+t cg$cJ`\˖o-.U[AC8D@nev}v$pVC νCb JOz:Θ!tuʮ}& c > 6AiM*|H. ڨDlT3wN=]iӅFߧhI,;:bߞM\kA:zͳtq:оiv{$n|,mٴӎ|25_SͱzV>0]W f«\{nt}W]c} on6@q ?jʐ^zE?ů7t5zM5I_JyzJ}>ZsdjWgQj1$5e QV*2HV9wx՛qAF)JmvC=lnY_E}^V9ͨSC5Y\ >\J ̧F Qf^eR{lwz/^Gf7)-+~Tt}Kz⦩hV 1Tp#ʁ򻵨<]Rekdvd% ~q d>hŎwDK~pDŽYXH#aQ0% EbA}L)-kQA9z Q$NSWr*Oo`QP0I DfTĥV7{d I,sޠ6Q2\ UtZoBm' )W{S+wR 1 1ih9,OU+)U=d=#'Cկ2]nڃs+>fnw݉]|u^bʋ탏V{/P9sr 8rre` #t^ }EPL샏7kϜLԈNMfn s"ƀRp}D?)#zpl,x_exbe*ÝLc25JwQ72l6 4φ]BڂǏlP) Չ {xHuׄ4eXJhBLΘ dqW(ȤzZ xP =+wΎ&:=$`1bH^Vu Ή?RHg>*lƙB ږ,WgOʫl['higegWSM4T\@s&YuD鸀e蹑.jg3db60XסGd+)+ ˹ -cK +En73)8Xp3$Q~Pm/7OTE%#AIPbCҘ Y X"tIA InxS苑A.qz)"K"yT<ó/VO<Ǭʙ+ 91%bg䖛RgiL6)iO=Yo)g\qNbqΎrщ)"/];B+VlYc-!(GH( }KVQO`ԥ3dfV1cRzC`Iym ' ~lz;oԞ$z!eJAKo \rkc2U9J2 8zQ~ ]mo9+qNiE@ ~_Ȓ#ɞd߯ز-Y-euGNnu?$ŪUǯ_D&&Nx:VZ*q9ueoL*'<*f\ \9b%C:fq֖ޗ٤G]ObS@Oۢ|ȮʧVEIꑌ" GTLDhv!Y X!YXЫGs4ƖqdmЎ$dW0 k9F'4Oׇ-Ey2>n 9) pHMl"ݡm0ޙm.Lzm}#,&! 
[Ğ889kCr Ra9/LPd=W!>^1|kQ2&x5[A19p&ҨyL> ):o r~!\OF7T)p.%oFy.5JQ#e.vN#>gՎQ'p \P`(K'p$rflT1/Y*1F¹vqt~A&c}h@}) Oo]=1I#u& A8Pcg-U^H>*¼)Ĕ2YIH@4*SM@u1=IT }BD #438+Br2Q)b%+M'gn f3 9q2nu*F7kIn RmlRQ97/A|9dUyty"EN3j!0ù0igdP^EodɡŞ\QԌFJQ`LH.Qh+d\!+"oX4yqؚsSұ+6\a׍=P2횪N_xYΑ~z< G?(QWK%;W>5+y-0h{^R>ϧ[]w2\BW-2J{:BRR)N;DWX3p ]e=e{OWGCWbC^*hu Q*Bz:B2w2` M;hEetj:%9Vt\CKW[5CZjW[Еjס Hg誄kBWm𞮎4N&(#&( `^=Al<90*^F6-:vʑ&7-rX&QNY/(dy]\^ss1ɡ)9>e_LUo @; JFRjr&ߟ׾9á* +Qr8i060󀯳:PMoZ6iޖP+!k>k@k@mҟu.U<wJ)8"srti .>*GUStyNnNir;rga%uL)p\.QS4tdBwvBψ}u|7W_GTמ2H>'[ ٯweoɲcVgіf|&7h(FN§lw`P(>^䲁C&Sʖ TZʷ99W8|0vl@Nɭh3L 1{ ]{IuY-UdA}}ZwvB6uT~{T+Tz:3[\>fSW,/SB}91%4)n[ϔe5r֢GVɭ(>ViG4q1+V01 \s`2o ц(}T(9g윛Si.Y~{v+y9e3`l 3NYDkkS6tNq2>0{r=~na$Ù JRsP%EfVF,ּ+b7XH ^0UHu)\؏#\ _YP̀+|n缲O^U+U%W ԋݗ?KJ\*/(\W/vznIP:TzCP+4Õ+ЌVQ>>]=\O9 ɮUF+H*1ҕL.V̀Uw6K+tvB螮@KIU*V3t@抆 i=]ec+e]]!`Feg*坱22ʖDOWCWpyJIMw C -HvB1ҕ1FtM4jC nVh9;PPvѕނtOW=%i `At2C nVUFDOWGHW Jp5EWXWmz:J*6e Uܜ u* hPx= %v׸8ϧt2T:m0/հgݮ]F]J˦\(|?-9vT0<ڢT]oeZ ŦTM|׃/WWIf7 E[FAʏo%tyQ׿Oo#@*9{AdpR6r|Kr^L=%Iq>k8釳zˑu`Y#[Iӏ^Fv퉳9r)c:Z@գmw-d3ЌVQj] GZ+>آ3tp@W 3m+DtՋЕ? `Ew6nr!7]+D[RBZ D b<6h%%vBLHWX G]+ \uRt(M%bOWCWsкCt Iltwh5o:>J+,#XD,iu~Xy`UDXDW= / tk߶*/ؐ|&O^T23RQ~]@\zzu] fؽZѶᅦtW$-։HZ]l_8Hѥ ˦l5 /1,[-g(Y(m5C󬥅&ñu4_ ׺dP&7 +6d07'ͨ -g)L| 5WM=l*ur|.F.j09.hR%QGL:5 lRs%n^3B_(DmN 'JV4$e뫐o\i3"qrCyWŃ~:Jc:)'%x--"DO@2ݗs'ȻaDlQޕb¿CQ?g^t_aËbXwKum=?-^٠LY٪|t=^\f7<4'g'qymkS;[=\챇~H,q)h+gŪUm$?(ggD!KYg)#3<+:gQVW:._⼸D~Z>M;al쯊qJ O?Y}bDf&ijb NP~*D,)OH1re?pd+Qysn^~k Gӹ֢eR^! Iam^~7ψ|]lpmf4g4Y^Amoۦ!}]ϭ]m4j[t~`"w6ů8-5w~u\3 nuu3k{dO6ۻzA?ͶCjPfέwkx_xs}@+-wC)},)Fɹk9|OXyZS=У~,5h'hsԦYo{͢XAqaTXW̝x3KW8qŵB_q|լժxf&ܓ!/n=#,wFdszNi`<"ӧE6D1܌F_{G\-BOWe^]|NOLw\+F'g@^SQ&8jo8難6|}S}Zԣ@ї}i<K^yq3>̎e`P 7 bgܾ೟*r~V="R]XR{Euۣ^R9{5͡Ń˻IB ZA?{lqg-R4Or$zM~*8|k(ͪ ioXܤ60Bm1{'s~q9.Z#*@i#*fmHLЙ J61>35PoN6SgGO6эE&yiK,a-A~yr;ZV A{n2zdlZD*rA>N`0)Qࢷo$/N9 2?DXvqvD\cDZTrapt\ p*vK8Lj3,掠G!Y W %l)BV'7 6^2~*kO,f4?OnkKlW%U{QQxޔ{H?ZI5?M9zɶ?_niuMm".@{Z[2rl ft I4:r+y2H5a3_?( l\$^2!- h}4( RoRۂǼMoIC"*P:Δ0rc$2'dg+udj Ι9<PHi)rE;46x+m%ۭ"ƱCGAG3-QnZg  MHnB| ;8)$]_VdIYμ NƝ%rS@䉥@%2"WDR).T>J1T(e+C&UE"Ykc!F& =B擭Û q ;0ք-΄6 e̓?k`8OFČe9d"ɵd,:ZsVkd2NJg뺫Vfa( &ɵj.L#}"!8#fSjD-rk0Rv5 .QK ԥe1q% {+l0h۠M:&]6>8LȐQ!]E-PAA:.J$֢>F~}: bX?ՈFTF4.7$Ę 0q0f% fvgN2 -cF 2Ʉ΢1m|cT p HτTLPFaOƻ: T5cLڗCr$^9zF 8MPk#9!KkCs@ 9k7ؽ&DBh?6dSʣ44[^W(PnBK^neG8Inӊ$4T,,YȥLr$mLNrIn_F;9 8#N1NY@aZ1qP My4B 'FFn$&2',1,0-Z;YYΪsrČ?}Ũt=d0\K xȂgI-50&Ki#!FIKdD<*?W3󿳐$e0iÝyeJID1#OhIkzԊA'u~Rx/{D.HAP+&p=WȮ,PE$):QEqLpzw)ˆxeCV9C4}fV(4@kfT`#̅!(b+e/'],fDh#Pʶ^j,rU^g5CY\)x`!nN%ȣ!%1V:zc+t?c~R k8Ԥi,kLwg@,ds%*$L;n>L!Bp.T*&~EhI1+e \,նb, Fi._}͗S/|-KA.CVbf ѣF O" -!8}Wω!Je2{D.K `୷ADrpwHvrr,RO[j ZbOw3reZzdt^%;&X+ȜZԊse^:P;^C1ΩMp![~%SL/_u9l/x^R(Fa9SJelܐD'A@]:EZyl q=h0uMXae|>Y%6dZ1&21H$9:ɱc0v^頣 (eףdTa&+d%/xcU)0b2j һ ނg&'譶 %|o,)dY(s%}$x^pl'LpRv%eQ/ӹhM 0E)cH'G:rt%f$QgXb{0lmR9{75_wePChN8)9aIPD+%dV9zdw_Zm')gb[f37֑f>C1 p바KHHʿ/J bn 2\*`t&Gm^< X,ӛesvVc{`k"\*r%M)Et ;2 Is&LeJ:gi+Pjע\Mqey ң,RnzQ)+&lB6z?M>zy(sïpK̶s^qS=I*YэEˣ Hn֞9]f tBf߬ݯsl2&dU b,|7&:Ku;!%`P+7Qlj9/Sh3o,%<)Fǝf jmK܂!̥m8jN(ZHI8M{VW 1cr\Ó2"G%(R\H_1Tw.PQ0;km"C?hdFp ifMC_P`ᄟJϖVSvw-.R;eF.Al=C/G11t\U>ސחYϮ'asV~@&-X‡~~~È ~ʋaNVeI5 xՔSYi1'x ȵ޵6q,׿';JyJJlWʍʽj# hY,H8 rn`LwE4gIz7qZxP-څhp`2Qf$#=/ 0ʁf8<f%'T03ƽȝo%)Е<oʾ*X}.DqCD) 'GDe&Kmsyw^ے}͢tLvst#kzz[.;'~} _Z(S&K}J:t6X-EDǵ@ڇ""||쑽ax=h-xg!1ìrH'(!8ف\9IncFT4ٚe.1}5"yq*x8l;۴ycժ}󤰝 .Ŕm F'AY"[齔D锔qQ.I\oxd1YaTdn^"*bMF W =8-*Etc)by2\*GQ p;8yJIE]tZ[\0)~9X<'h#71qbZۃ+}ТCg%CC&PTڳtĆyABx 6ZM[8}fqA(3hQSj(I2yw;SBx:Ĥp ] S. 
8/s=,J烛aM O^cK79|d aSalį!rI0p)sxAc4Ujfo#)=owɤ؃L>Z5˒q-wkP4 POE xv:w]ja7|pJ2]MxdJz(u,Qsa0JFuSWO/1@m~wͰMNy;M]7n[>7ϝ[* N}.FK.b)0z~&?*C0HMDGE݉H¦@\ hb&D>\*Ӽ~Ȧy}dAE:"+p q|ig1%I@-"HT@kgR"JzIP<ҐqF3Ч͍•zJm1Iyҏ~R}hE ϊ^z15 `Y/ڝя6}mRܳ0x@Ĺ7I{2 LJЊ۬%Two6O:^ϼ|-"R2/a>VʙLfR;?|-fOytxp$Kw*!?At$XM!1 рsN#dQ$5,iE;WI&\( 2\%)Ĩ5p:A 10ZP)q6C9,n8ve}1TR$$C !@Z"N t1n‰`K8g$e=K[AzM9"MC rB+J]q"-ŅE&i*.;1jE D6h M|  BȏՂH *v )U1;1:K@4*k7,D:#6*qI^5tLr0<oxN.m4-c2SY{>#]43:vD:6>H+{ٺC~ӳ m5A6聶AB~;l e!v;[6 l?_I8I+ {яа74q9}&=MJeL4iKYϚqI6eg糝ij7 "sDml#^[mͼ3DA7 f@kpcy::{yu0y{巆Lp5:q (ɰs|_n3 /Ap6p5~ȭn.O-rKrC\.C̽ EK0GL"U` ʇ6T_E,/Oww൜L?FJt"e<1=^=^5,n =ĪWv+&()|u;Dd3X$u% UrJdq ٶYԓ#;؞Nr"$nDhTr\dvsd=zJ*)9(dUWS1WYZsY(YXzs̕V<ѻ]4'ME0I%^l>?Bk m6}=a"Woayn3ӿj7Fk#iޟ0; {K='zB%2bJ*+"@պ rQIET>) v AOyrʇHu1/r0y *LC *)D"i\&Σ:D=}[߁"A:cw&JYCi(^3^=?!Au*v1U~- v.axO]Y9cHּK\cIIO=-(->Y QgC"PO<M5@e"@M``" $$YFiLV $r!(\XNd $ tr%H@xF%QMns~rYJe^6|r7]v6wތj9-K׷nwCVtsόevíUx[-!Hȵչ2ES 6~[1zz9kxn]ZߎƼs4b+8]7_曷[y0o~u3|;﷘aڮCnl 2=ZSsymkws.yNg>Z{Z/zno~ \h|6ø֢PՍrDT\pz0T&Ei0ZG{U?z /PLzYw취 Ҝ 49si}`2D>Swq=և)1qvTh#a3)yH9&XfcL7CW* d|U"[%wJ, 8 Q*5"V0/k604sHOqmz4zGd߯Nc)=yQ>w+_n'|vTL8J+W3[IߢR}a!R)EE4@' bҨR$i1&Ǿwh'\KH8.X{^8?ZY7sڪ;a(5ea,n8cF}VL/z+;ŰeEEo/X@ۛTBb洹%sFQ#U{6M]KlchI^ptDO#,. <1T <0g9NwhNaF J %%b54rg:băCr]?\ u@o pӸKc a=QEc\C~I<0VeQ ֊} ڌT@SB M\bZ;U@72vg?2Uaa7͸-Bg,#+^ɮ:k-/i_7'GL)*DU6jR)z1V-I/L}VnhAJhkMz@%TBN H W*T읹8r)<ǂ0ޤDFBXy]m<me#vM:rDT|R>x x$,$A"o F0J>X+b1rȹ3vg?AsրX~]gDt#"xz#fUq}8LqvJN8*AS&|5}%RJSA@.'8-gH$`s@sq ˣF61uvӒb茋~oT!1lѬUu# #FC1{$ׄQVpq>ph"^q[< aO*M$HG\ُbl3yݕ80yiZ)e>ʱ"{5MpOMݶ?,tNvP_oW\|%PY|PcŪW٤I&\BNUNыHUf*>40}dkE8q\ט` dRSn@t ="MX/'U$߱=)UytK_].qCіv0'UOŋ6;P̥YYD R:TQbe5ffVh,wi^'c1Z-YF<TTVЅȦ)bKW;˪f5>)-:B#e*d"D*Vd;Y7q>ۓOoRT9t -*ƈ{0bHP){$Y#u[GiOE7@}N?%XS٫ )tZɖR E'T$/8FWN8i. vuAJhcj2ΤY;V2jġT iFK^;en:qeCՙF]A4sf2AY:[{u (FevJ8sFDd;[}4{mڮfIU3(kN%%h$b)J5eQrͣy\8cG:RgH$ٲ Owqth2rr&73QGPlHdaoowɖ8[xqm'JbR[4MQ~24^FJ7⎈FʎUsAbr *D2;|+%+J:X+e+HEU,8E>1Gֱy\ 2a IגTͱ&΁).XUf_x,WɠE%2j 1h=wC6۔6{HhRr5LgP,Nϖ-'ö4Z|qZ_&g+A&dQC8J1H&Y Ax,@Fe+B/ADq_V[.b~z:c=DJEy^)(cIѹ읒{XEMAF( j.TQ~F?tP.0oH)[TtPD/%9)'9! *QAq%y;҃ޚfL7&+(]q%i ͘Q$6rpJihLoL'eGK0#GC#b$0|8(ѾeQv"]Zؚͤ9pIu GŏOq)~-瓛n:;#Kdz##y1ο|6}ÊOp6z<iƛ)!$dk|5UL8*jأb"V:-* {*b"TO.JL}Y ҊDB\VG,̂UM9ZQ|R1Mcf ?Q0t|#=tnzu=t|oLͺͥFXՌzUjEF @ b¬"SՂ)iW1bN>, <# R$iELu8VBRI&&vg_g=;^竺kWk_?5g>XWcEAvYUWka/J{|ބT$Ԡ}gkb`kϪtʼn|?&pr.s1~iŰ|'qؿ_wJ-~7)G]Jb++LIΆ8!g/!۳相J>Jmr: 4nny->Vp } ~Ors!=:Y |_&I؛HO.Gn"8y??[\*~b8ٿn%nRD|6t;vp !%tALx-^$.T kqK ۣr_"ocyOVo&g6r|d{g{->B5Ai>jjV&]AI`,!pJڊZyWDZtm5EHQVq滏iaP:9[V Hi}|ڶyo'&;y~mӯ%;bX ^TZ׬)x}}SH9P+FSc+Sp/t!3f&$PgʣhDF]fnū]oj[,Zy]JIbS ז)w^p!kjtnzo%G4|)<9l !-n>M7|Χ*'< ŬtL@Y(!;Ê;T!F˙4zp>3_ޔzvBψBu⑑㫷pT kB3uZ-? Y%!"\;Z}7?7X;lPМ}B}S;;J_ d'74>rNW6;Mƃ)k^[|"^])u0JЦ6*l4:oLD B_IMV!Zh$WRTD}DW`0)\qPz995(+"73?=qL^geR&? 
ܪHg~ɋ-kω''[y>y9?yV+K f4 duj8.Wwx>Fx9PO^_M!3w-B+4G.4A1m18c0MEof6.g1ƚQ-:ena>ɐu_"]6"6)2_Xx$:K{7Y P=[>}xyVZ.A#\W{Qe$2/7oƾ;R]ߕ&_'&p`2PְkLEʹyyII" jf__eZ`FϢ sцK4 0|IV-gAeez&ejC,]zs"lBT$ڄtZ?4-jӭܸ ix"x2AVrdҦf36p^$.S4Bېxs_;}+.M3 mg@6=Jxqs.qJTK8Xb˵(,9v-%1#9D-g2LH/yS%ox;]c*^AH1jBE.ԪU uiG{P^@G̮n2&gBdvwɥ/!s=@~.Qü]Gr1 ?̀rx6)O=v^zǔO)()8 +Qpo48g`XQՔ *L wqF_Ss, \[<Ҏ${A+ 3mٔATtiuIݹo}G])CKbncgS%e1 ]IC⤻s}Hn{o:en+Zs4<:r"R)U{ HgfwiDPW>GqmmSK٦ӹ")zL'+tK0t㼡[bʺz۫~o4<ߠB%@js[ Jm"9aIn֐GӔdȋՍZeNV Qp95q`Y 2BQRHRЖ\21Hp6U -İHOrLbv k$?HMy +B] Yr'eSQ"Kf] F*]rEY/*1Ӵ_2qC%0` [1yHGm SQw^`QHf%ھ"[־造wס#4)5x8p((˺gMIo;-=D$ЈAs`;qk6bI#1X!1M7{wVڦgc롻g(1B>PDCq q*MN9EGݷV`?)f՗%ly^S^4L'\3l.}1O$ZP>{[<};=0]S]~[ǻ~u->ģkr^' )ޯdzK7݊[yZc;"GBZ^YeWHkQ6t_V¶ ms@A|=x*Sq$;}܍`N'"oUrv1++v=0K8.r.rSf["KMzq|p8 =, N`Q gɽ$tuLٹ]=մ<[Zqwi}ZaYt}5K_m2ŝ/^M/X.䴿JD_̦gs{(/mHuD_/0IQdZ|XY;׃k1OceDNr(K\;OXR^=OR Ew.4-2¾eΤ%4C <=:=SK-(-tW؆M?˃Ab.CraבJi' Wck;pSގ n+/yqpR!c::@u6%ڸq9P@[*S-)BWkI($ҜP+~UF= Y#˕$c4YB B%z2FWO% :Bq -|,S6NPꪑL$T%Fu + A=!uvOG]5rQWBQnՕPIFu +Y` F OF]5r~*JmvWWJFu-`͕mukqargg-fOA;~eӓ|b]ɯ_> (sU*,o|\f/sy|xT )ty]Re?OHK?mMFp0r֤(A%m TҡR:ouL[[y L'ж_sqzrrg˵^8lޞ4W,?;.Ǧv2B7+A,\b&6Pն.́[kB͹k.FmIΓPM=tyOT.}R~8|5goSF f֞bv1TKUH-L%CV3c9{v`τ:(8taʡ|qCjAv}9em+v(ֵGlkp|Ľ\i{bѪ4cNb*)`F|FG딁"*R&˜OG9{a|ԁA @MLa<-7{]br*\t!ɃYT5A0rS̔t%W_r1*3 FΆr$fk-;UnMXt`MAu8l5ʌRW![NFG tDSGR-PG1VHE*:RA*dr:yjX嬂~Y ΏPk0ΏG?i`-+bԜRlHFVwRt%%vV-2 H=*AY98Qm-^y[/׊JaN:kʆڱ*<LjeNe* J0Wa&a+r, XGڤTcCFDYZOY}Xd[ ߲QCX+:k3GT\!Ʃ5x4d(,Q`WLV3(zMfOnKIbq%::8{1O|%ۄ^uwiǂ_[^~)+;pбѱ@0]W.l1l5ŏ\Lr7C~6?ONO>Ll/[ei6G0a' +/!=F]N8}֩ :!݈XJ|*Axi6#{$\m7[<Kӣ u8v; Ȭd;uх([-&0fOe;a؊sR%hQjd[U׉)eM\)KU{C(%3,Dp @bL G#g;U>`ZwqXJLYϦm1l$h{C*9❱ hk ݇+`H@ PmU$% CI0dGh9:9 t3eN0ǽ:G?ld)Lb|4^#+ OEמl#ޕ,𧗅`$6gk'1>->SZ{HY$%j(Y%rWul)Y'=˷ZN yDu' !W8;#k7ѕ2:oeLL2T=L0Rd=II8 QQRJm95c{X5]g E˺pIQ~"# ;rpwrn49?NH!'AɡiIH@tޥdR4%Nx@YJ.Z$p8p4m9j3Jx%f6ꄶA}„aU L5vk~2+&fg2A]ڢ=~dIoF]yU(; 0㵠N&0tb*D( Va^IZȈ @0pШk"rA$#ʦ1]Z#g>,,+Ʈhc_([ֈ׈Fܚ+SB"BIRL눕4Cf̳2kw[JmcS [VKP&r6rU~ow8~Ey 'qтDq<{Vbi5%X ,*ht- H[D4]Ⱥt\Y\Y'\TLΡ!x% :Jku`(q>:II"b53Z$BF:j1!15cZiSЭ;.4t<a*n-O 瞇FAϾu"[Wq7߂b>Ĵ%̗ !j8S387io.2u4aOϟT>rOWnvp(xp6^ރ`J$'Ivd]L~*{?G \)Z (%ep,e!/嬜3Y_JDuOjʰcg̰p\'CY}vxtřs ֲ61`!D3ŊcAkwZ杰 RWHs sZ,h Ff-[˭swb+|:ˇ-joB,b~ZYI Ƹy5{H/k^QHrpEbJ^ F}ґc#BU wVTY.+3&$9Ҙj͘ ;H^[oY?O'yTby~k6Eٮi!LR"O*8+rQ|Gf.>тIVxaX!|@U5)KqoG fx{l窺a 4!2MLSID)'OɂDФv5}$`{vs9)u th跧!?ϛv-O"Ǚx: bs"1! !rƽb#(9Y洏VODy.enUaBؗi:Gk5^! >"p[Ts=AnCR;n"(³E}7*!ODG&Ѥ"j'@S9ʜh0ʥszEW<(t~{ճQG&+\_˨WnE 4{x0l<iA'EjB*d4lw3 l>9?nTG+GY8V.@,1[>lROn[ng+洵]Jo|啕/yQcVsq69*bQ.~*k2J.> }NkDfW4~jz)F frJ_.V i^|ٿl"Ԉ%5ӗA}髩c&ҽ^oJrP2#^T)*O &馑ʹ5wq (s:[s¤ Ilr5K]3$7Ɩ•p)Z0{͍ƹџa4YĢ̌%C )SD4/NO wpFO]O4 MVt6_}Ïc__s}ò?f Ufٻy띿/h1Cn@:zIM!i&9U(dpg@AG˩kA _Go!͟Lco )FN$LF^FF~M?¼ ף1\"G/ϗW"qG_-U7||ŝwz^SQ+OG.xyx}3"e)A|3M\w7rPvd1 r8M]?)+pSVdG(s;v[k,'wge/oqXF<|nu7W 3ݯO.ȋQUrxfVϼD:`Q} 9|a~\^+%߾xmeq+R-sW@iZNPіs jP W``[́vizY 6)8\9*'bn˛ 4203PFMl^\iwl6s឴aI 7 :7 Pj7{ ocV_MLv;v5 .;|f"Xl]LG<>&Bh@4VI`92IN *L k^GBیҝH˫psMqQs:sk @sJ>K,h@omTDɨh:s1!(\ߴE\rߝUF?Oa^a8TkUP8# ռxBX%alۤ@c@v;݋,wE[83C*NƁ FQGh4JMJ!/?xeu[i껮ђgX*g6F5hB) Cx Ԩ V.hqQ4nm.PW=thw(D*?Z}ڶ-H+exbiI>PA2%h^%X;%# ?"rOrWnv ?l27O'޵,B)AvꛁEl'!%8Z2%ezTIŋHZc@idge~e/*eTuߒoX.zʴ@ޥu̲T嬷ȹs{aa}P1¿e֌!^;$t9y bU_t`U0?AWzjEZ l<_(|Oha?"ͣjV}zWqeyv_z6e<-?z&UJV4nc;d +#v;guZ*; #oKy+A!ng#>BysH]YS:xG  ]t.oZ`)4J缵P]wA"A z Z+3Ez/~sC UM)P`(a6"Ţ.9-P[8G&g@G AH RV_nԭȾziۧ KIwl6 )GfJEGWӛ>_YGcKHZȋ:@nD/De? o*h㓵Dd,;:H% $2 UF;R|N(*v*TkUDA:km,0,=BAG2'((Q‘PFp>G>F0͡ϲ6]Zd_&隂I$|gNB+`sK8gը=l%P%DкuYz,#^ |g"h4.C`xk֥vUMU+lI )z$Hə"$K} 5qo#fc}>n(Wp/ޢ|6>DŽ;:--vVm笈pifwLO=s.;=;="r+iה릾ݽ>{&bY4~֔p뮎v:ܯε]v~5>W3d|;#޷-qٯ }t;&^vH?pӵR5WodmYLvoѩl_jꥯ{no~$նWfuh)x5"u6Jѹ~` =<0*Qnag~r%7MOZ+N-b%/m,yT&\~&c! 
i/M> `[Zc*(A8FrRQࢷ?IΜO*퓋{,´ta.|/yMG<6r:nQ8D%IqZ2yW/Sʞb5j{RiaPWoP]0Vg*_[Jv碮*O]]1ZРޢBgYY`Xgp+d25w|ON#>( pO_?0MnC&{yI~ɚFIG1w_~c#X 8#-8Pٓ{6ZR Եtr٠ߎ&F\ 6磮*VbjSWWJ9)<#u+kF]Ռ aE]Uj}J%A]Au:'tU >]CuQWZ0*8ޞ^O.k>|Yt4yeKF2hrY\jןP伀.Bg*LAq<|$ֽ4Ǖir"ލ]| 4ZJVTcQ6*/E?W6XJD!?!O/Xa4x5o_eWMK}O3??&?K21-WVz>c C}7V_>x!پu]5y/-(cPE.`;ZvbLp*JcT:dbp]@SˋxVY"ioXcJF^ ,LV@6ЌQaUn'Oʂ0-^TJ"?i◰(!'cuy4o%^z{ZS"SI2H!1ik2W ~'ŗ *.jTVe9K 5`]୷k A9ƌ:$dIN$j,+33k2H I r-k)CI%Œdc$LPlv0)vH Mw-7!U)Z%t̂Ն(Er`vG"$,Xæ8tZT>[zxZ2f߸`BQRtXdeLD*[S)zYZc)Y2vB26˱?=U}ڛtB7InB[5orfА6-mR,mҰzVyHL9%`UjɟV*&O zNǫ5le/M{rWkV!ӝt %E 2CͪK듌tmCם=\QZ13oFUz;qiMk!G- oۿ=W5TopC fG«m^buk?:bw;yf A05G:`Wb*jNȲZM{u:@YR# eD(52`.!圬A &dI5+drZ?7 ǫ U74?2VO~\ חiۄ8aeC.ycA#DV`de4eCNP;%mKDǔ"L,8&d2H!he$Hb #c>-wkwzK\c Sˋr6h:]m|հ{7.Fһ-z$)+ZQJi̦ץESd!t2tĘsFQ.,W},x)Sy~- daBXv^y[{,b.jϝkooJ3,$ݱ:Bhtt$t9<:Tw%:_@dtbG^.(wQr$z$ZU 'kȐ X0)+w$u"$wK@NIBe) Y;c2y =>D$ *DQ*TU Uת"(tX ?`XZ6#g7$z֍9stGp*|uƚzrvqG GBAO ޅϢ;6]Z3&D$|gNB+`st&8(gը=l:P:DuYz,#^ |g"h_2d&Ħs֪6XrN$=_e ELX%xF1lFΞQ ?/re.\}_ne7w&[ِue~?u؝U9+"\S/SO7jtymo"6yd}Mn+>ug2'6Ctn1zIlWajȎGvKnmwO,wk#l22gއ'_M8i2^?=詷Tfc㯆[rC ~k:) "el]jꥯ{no~,ߜ+vlW}.(+3RP JSJ\U4u"@߄":TQrRt29k`?<0*Qna~r4%7MOߒ N-%/m{џ!r4M>olC@^}f*9Sq@E ¡ea)mjOk3g쓋{lÎˑv\#)0lRQ/XBGy޲2Ax>=ms,ZD_08PL &EWQ*wE@vVK!\2˶KM0Q[cdke fD&.29)MPk'ֽK[WUzB R4i0l/vB-v<鉪Q>%Yn)Њ%4c e#g X̙>k[Pbf=v `N1NUYH3Z(Q8#2F iCIʙţdFR3 q֯, [lj2 ﰱ5#gO9S3r\ncT|2-HD`x( FMxE D1j-CG2@K^Gg)ڤ^*(dF:-6bJ URh) Ǻ_O2;@fP+\~'bO]a#hIT*g"= D] PlRevU`@cDq 3 ޽ O<'Z>Wi} u"cM^j67,翢Sr **9[ q]H>:5=c;Ȓ$`({zv%a6fװkw;3= TaNHlbĹ@Zi D8@f,s5gDH6%5y_\esK㻃ydBb6֘A1<*I<ѐǒ+zi0z&c{}jr_g20޾14m\. HM('E`UU ٳ͏P!mRnX;!hz8To(=*V&en38'>*bEb@-))RB=Ci@< r!XR) Yߒ<[ YsMJks5hu| g[^ :_5QBL:;t"s%1L2Hd0 \{oP3h(_u:ue^eP\Z5(2Ge]-M9+䄑E` C /^ 셽kUR'\UxƺawQN~=cAvu03qȤ"JʞMdMF8hbHFewQ n YKZ<k )DPti ҭ̘OnL18Gނ,`49iȹAmTƴ{c:>_XژzA/!{\,JLd3tpK$eQ.-JZL>Wt_˹eU|0;K/Ϊlg_<ڜ>*`J"1E)Bc8DzHCɽ-rDDur)ʙT)CbȆ' Ī4[E;,fx{>Y+(-À[ZaRxTvD8kuޥ&[2NΛ3;&ĝOڱy Wrof{wt>7NE s1:AÌNizT GlN6*:B& >.R&s4Ԗ25*b7%}7KؙC1qٴeCMq?G03]*3Q4& - *|v>+}2|Էg0H!ii@7SI&('l:gDY&#W fi\}묦%ʻ˥pq}#"Np,RPB  r알0{%.p@퀋xεxX }wa먰-U. r<JLeˏJZ$ Y&&sJ`pKD#|eYM;Al!4r?;/6 ۳6wSG]K'Us0S2;cNr ,cBV|Ø T xs}  I=2N\Hhd ʑө&El:]m-} Mk_)E~Y>g齕S7L'ͥL;r HδHPn-kNf:0ȟ0c]_ yWn c~s,LYyV&qJ83:{>3A"DG_0ͻAOlV1>ZIOv7{Ч[Fq7S4޿A&'֐Jxh @N'*>Xr>pPv8s1 D(T`x`CP‹`!ỳ ̀U⌑+!K[dP< uT@-lωǣMmI\Z|e-fySoO,).0<[ŤJ6h,rLٶɬzpKB`S <( гzѐXσ koE);s$8ցf.Y$+'yE p<{`xV5YI \9Y#N`G0~sFY %f'd'F&bw5C!C Ugw H KR /lttO;\UNGa>pYjF4Xcu\[s=`>꫽kwWo۾ 'MOREVQGǓ8>Kt=dj5<4Oկөuڏa-~ f6%a\W\To+ыٜ鿑a:)qMheM+q9x`Z*1{:vDqr<^什ק_w?LgooǏU^HZA;M8yqW;ޛ0;E#CO{P{rz& &Oosmpz? eO<~*{sqs{y<(G%+!|5ߦ!9$zz609ݱfJ8^a8zqݥ>81Y*◔Jwlh/Zh;_by?n#AP|Hv"˃s:˸+xV;]>p; |?-o]9lR6hma3)a_DF T^XH_i#J*t5^>.tx=zmt4 M&3Mw_֝۾1bw8,5iMlRsF6߱Lo9AюEXnoh^^j}թd~57IW pV"вEZ[LAV-96nƻb杚.bq K|ԝ:!jV 1Yj$.[%1֥ҞyѣʖDxLgrbU\1ӒGӉ *U&=2?^P ?<` `"RiTY N5:D0`a*W40fӾ$`]^{~hw~+R}}>i\sgifԭKg,V'8el_z“sªkI5_yc:x2]|Z\WS2wel&߱^۷Iv,j曤tR.8\]&IrѭC8?[^w~-ҁvc,(DW$ -ҹ*gts'Iufܚ V~Qw噃6G:;Rhwgɼ}ūnlJ]ZNg=6.g9lӽ_wu=uY qܿ~V{bb) wUkU>HY\|Ft5Gz6tj~t5QZ|/81k3:|wVIZU0=x+냳[zF77_I|7(:#m^ݯ/ up?8ǭz*>T|}ܾA\10oNgn/|svRk{cZBaFSvAז|GQ1@?3,Dڮ}88{\'I=_|xz*̐}4mg~~BLmX7"-mb%IbQiHxT0}L~"\E,Cψe.6)ӫyE£"6q"|~r/}~k[&jU4m~l?ۏ $!q _]ftҦhIu^KQLR??31-ՓwUZfvmʦ%#8 &YM-L)>Ml&ڧ?0QL-|S l]ܳ 7BW@KN:]M/tҕ ψ&`φ&\{6t5~nr}<]MI^ꋤ+QΡ*csSoГ_}Gxxwjٟ^@zx;&!߿ۯ <rt3YG_p驗np}zO_ ߷I5n9ce fY&u.I1HeprQSO2y7ӾqOr=y}ԶiO]ޠ]?Nr~KC>>ys&|bib}z~f}7Oؿ󓗭ϵ/פ)YTd 0^_g*f}* k?ۛ]_߶璢nֺyTceNZ]o?Wv-;W\69TL*H/rNO.$rr'7(%Wjk]i|^nnݖܻ)ފ%H'?\k) Ф$2cn.'|*Zt<֖~Q^ _=,;U|*q%bax:!*kJ eH[XpM뮚~;!QF.mhCnC+jpí|@awȱed[# |ww( eQ`l ϖ 7<'&ڔH/;- Pzuv`^a. 
eiuY\zˇRϩ/IƝP_](8ϵ\iWoyਪݒOsE]^tOr1}R-Qݪ+>ص`u{¸>`{bX>>5;nYç8 EnX~cV;Z?۹9 *g/q `G䖕/d%+ Ua1U,x)Lސ<^F|>>@{Q맼[}w7%\G\a-IJʾsGQuPzVjJ.Bk]H2אKi6F2kC?nA&F*;CF2' }[{PnD˧r=yۥXC2KuDn.1 I;I΍-FKwAhHf  9ZBp~V[Ju=QKB 6wD4!%^My J*9*n <ԱxF4p>{*e`0&!Xv 4J91b5%cث{! Au7I|s8=ʵdC̕|UFKG5 T,JVH_Cysͬ\/C̷\sAsL Hn$*;1# }|X@d=!v2ΒcOZsqu~#9c@Fxk>$]Rj() A5})Tp,}y+-@,E>Z+Q\ 9REwYw!'hJ31K(Er[ ],9%ģ::D{ >B.@N`.]Bф ix lT$`5J(-)aGPQCQ`Z y95%9A,7Ck+a$00!-#[qZ"uU6X`ucv87);O-`Z6vJ]IT " l6E6%;9f:"J}w .P*\l F҉b8B@F^WsO J+rz 5AmTz2:aS`b 152ePBdBEk 4l0U*1 |k)> A"g#j;.q bL.4 L%J 2Xů2(u+@e4wr$4+Atxoi¦;:E@(-A(YgW`}#XfD5ڽ(JhYW* ȼcIn~j] !ѿR$[`Bi( ~MU%@H2}{p(>C@ ∽hV4yYgF_-=ګ^,K\3fCCMǘAQURsύ:D0BKSoL|cS^m_Z ^*G.0mFL`-d3x7PT8xi\MPtd9r j6bfZmU&:cx P2X ~>\5qa& ) >G=ULԴ&"`K,^zGJ +0/1 JW|d#-(w2 cm`ªJrC{k=!Vw@vV*ie>A%N&?P|M!- W>ZAgvI~V/ x6mxfqȫD7EjII̿;HJDlvJԁ$V2+*#2"+9 ̌p[V2CjC3zQ$z+B! a%i)Ɍ,|dXbJnq'[Kpp!`~`vNs6_K!\v)&X#f%D(d$| ]X7.CU|M_00u·+fw*;c#DoaL>``A# ߍ7K86 M >Aj;9C8mޔ_]~4WE0S.@Y֊zqS;sy^VK2 bu9-9io(-0;![E'|ͱd V_H>_; %Vt"e5 o}8.(:e-+<bt]gLuo}[fwzVs6h ,mA*%!E!OI<( D,1q!?l)]CNfقY|Ͻɷs܆2058A`MٌW}kN8=5~g9 8RS뎶a&* C"T*']'94Lb֋Īk*̌,vLi,H | fmI LeP2BNb5C:27# pޮ쓙P㓇&*~ [ȢXC˸'lSpGD^r[t71%`=BG%+@Kz-bi1>IX  uTeHU$ 9&cZ..ٜؑ5eCV(gD@lnR+\%F M5ae]N A(T)5&Ie m} lcP,VSTgj{h cXe ed:+f3dWS\XSg?&]ߤ[ _f $]$shnKb õŰ޳ ,`'vqqGw`m媩V\}dC$%Q\=nޣz9Mrk^,v*'KMU1뒫%"JZ2OUX $KAMPL[ gs}ۛ!~غњrp 7:&:KuLWHF$ VW2]"*u 6zSTFܷc?`➔QEyK dKƺ8ma?7o>W_=ރ\"hYyUYBǙѐZ\5s4)3Hc|Ʀt-t͆Eni/J{M n Ð)E $988V M V>[W6.trLFg_>=A Ҏi%YUG=[lª/5C"jc:2yG8Ә9gp ”TAUE1{"22MZeLv%d X&CL*D;ֆhs$wQ,Z]M!OMu8>Vhp7s5#ObOS]`XWyImТe0)ì:NHO.N3 ` m4: s{7Kes?Q4N*DEtpry2YsyߑfסU#.5r{`\̈́þߐEJ@cu\4o_gH{og(*`fs/NY;w= Kd pfßK:y6Pbzۇg߱']A8|?_xx)% rGbZ;0῟aZ%xg)t?:05ܝ z77`qSyx?h%05:ÿCcQ껻I*F'`_;eNx8H\iIee.j!~0۾A) g-.(} 0a+=uY?7tQ|*0?G ^b 3YsߺճF'x_fz.Ody{]QS}:s/8Z,3yȉUu\KEϰ/vy _ӳ6p9\ry_ Z P}RfSprp2~0ͿYU4V# .nݸ3a~^Auw]?oS/k&az0պGϋWwz:^tw[<]LQfլ?hpVUk $a*. &3x;SVfMe+ [%@NI7-֦ߺ}lnھf[n_#GC[lIa\4Li78ovd}oc# ӶV~NHn&v&In&In&In&In&In&In&In&In&In&In&In&In&In&In&In&In&In&In&In&In&In&In&In&Inq&!%M€9r[C"7 }lqr0JInGr[MѴwMC|{ D&!;:b&G-2)쾉LtIq]dpM`Y4*Ad ! 
Z-6 d%- o 4f:-O*߁G~8Ѧ.OOK>{9l!aZaoL0'L?a O~0'L?a O~0'L?a O~0'L?a O~0'L?a O~0'L?a O~?2ڍsmŇuKH% p\w*%OgY;߷޳NZѝs(gQwkJ 0KRY7HxGJ>{, V'Ș8f,66qɑp-s@t@/A [}[D ͆|x7!ϧϯ dp5/fC~UwVg)wtf=5XMn A4wB- [' ˵fU:d!aK"r>JK-,EkC n62zv6IkmU~.: Xn nS[ g J,x8p9(naz,ܥKMkH V6s+ᬪ`+|Qr L<<8$&~ n x]fpN_`FqUk> |l~~ 7C!јYYgw>+wr}ag嶾w-)Op⟃wkA 6-r뙉v~NɶSZgRZJR&WZ5U\ñTUTMfUUZCВ:]u9ɷHJ&2 6zRJhVTJmdNJՆIcۘ6їPMk%Tcp>i?:X\O8/`*n?_3d2.f Y1^=^5ByS\E=KTb0w Hר+ሯBCt}EHo" z{uϚgB yQJTEAc4ZI"&D#!+FԬmM-Ԡ>k ~}ށPڥ`=zkȞj]q&_7&F$y~y&D_!N14,+a:!L"$)#u2D:<;` wDPtV) \-=tNJeG|X캢{^R\ 8IwdLs孇f o)p0 {rUC~-J١rgZSb-)TpVLP+h)B"m/(5J^`PMTT`1N6Zc"6"J*4ZcB'* OU7+t<&SU)-osɊ6ZVtdž Ҷ 8/liK,af贸#tcB9sr0˙R{$%.LIP:sLDlʜ'Mh$ΙZm~hO%M-ܑLzf-q;mZ[f~=7|=7{'NGʭ Gp5)%Z@IR|.0RM.0}+M)the/Gvtn8X;: 0Wtu\+KWGChAW=e\(]]`IY1tp+m+DieGWHW).HAt5/)m+4!K+A5]ϧ6@!{gaMǍW kyūV1#Fr?&Os(qnsC8jsT%欜E0=./e*/Ȃ sQ"JS ]!Skо]]]I4/ֲBFBWvB{ptDWJPh1tp !Еvt#+ˡ+[JRWXrbWCWV( +# b J^ ]!SG2_OWR.~te%$ue9tp-m Qvo]^\:܋3qhOM8e{%t%:zSECWR JvBttutB2=d{T,,ye[z1VdCU6 }]Z 3]~@eHs00_zaZ |:eVjn+^mnhw9^- GtuFugo,rBW+L ?7떮a wf 6O+IGDED *3|d^5FtQ>prnkH:bm_a[֙idtıXqiu]G?Y[6S3-;7'X,PE끑&Cnv&6 uK:'\֟K+ Oꆘ󧿋:Ok]]yk3u ˏK$>J^!<"^~\-"e6 H, ۟n(mO_5&jts} \7&JepKu3Bv@P,Y(!6P d3Pɪ?7\ꯩbK~7MULף0`Q^S5s :(4JIdeL$,KSSFo e|=g۰ 퓥 M Sg},gN׿T~8yK=&͑>oogg7[t*tյGFPdÿUx4#˯ mmi/oM}_O7⪡?Mk ~+#JzC"$- kx2P214eLDseypg8f=%`)J%P`Ӱ'DR;A|ApOFkaE+H ;h=Vb xGcR { '׾?|!o\e:v&_2X]AK|O  ~ ` p?x[)3`˗FO2aԫ~LYjS/:[Y6ql6WQCoOLF =I?L}l~ԃ'K×gdoi{p>p֫sm| g^ 荠"2̴n:r>l(%+*y0%0TLP@6E(Ev ([b){<ֹ6h2<&|pIr$ j98٬*U>pi@kRȅ'7;\&x0-ђynZ)8Uc6mQu¹٣-EwQxhca|7IIu qHRVp73/ioUa 7l>~ R,^M7M&o3W10Z6L Ndc hXG$x37끭w?{ih0bR:% ΢O^`U:b堳q$% X)8L*Tb%Y굶FFOx"ΠxHoH4Q8kS⻎?{F}ȇ䠱 {ָC6FP]m1HIɖ >L=5ޕęfOMwUeKH?{RLsirn5Ws-!@ ; [2bo-Lc]XFzAdއh s.j,JLd348%Ǩ.-J-&㿎߫j#TDnjk&HLu'R?upvrEsbM=hro~=#Z&bP9aQgN!W$8ZSDȆ'&$n!Yd(PNxR 2 (E!09$҉ivGV_n5OG%`T= dz5ir^ٸ{ Z3\OnkSEm5 f.~74:]߷o>C(}dQeFq^x)‘<[nFZ$"O-"+="ަu@unkPuN{^ۚ5;`<}#C}OCGumíUbhqvt9iײ;X[.י^}0S)w4j|1vL2) >$9&297Zz̩uA.W /*>?&~[yhYV<*wg89ZPd5!ab}qtt4ȭvevMrnsv2}7?݃;9}BhCvKny[y ୛*suBz@2n|/Yml ?@'zҪoP}^ɍB1Ϧz.%7Zz_rPPr,!VVLW HKqC/ZG%9)qM25FE>XtL~";D&}D#F )2%5,DC.Ml]4>E %^G-" c.8SP\DDϕB.2K.x ^M jSehCÃ:s8RuƾFW6LΠ܀"|pRhyRIA&_*-,#vԼn*Fy! >%9ӓ评 "/RK/ZʂZ=~)fT3#+g%̮{q)E݈UE,f~RۙUĝxr8:1Yc;q\+9ԁH%`S06,Fo:h(hqFg)X٠3Υ, ՃEm>\ ,&ÍεLJfFÚV qơPUօrЅ 7w[\dZ<|l~={ ƣ;يu,>EB'H g9K(#M&CT`jldV4 #$RY_Q l&ێ>ʣ&G\e]5vNҖٛu/Z[ Z{@;O GA&YH[PW Q Q;UtFc01$aVՇ:iB&dQ%]E#PAA:.@HY'MBdyXF:D(ƾhjqFԕ54wv&$ƜLЇHc"015 1f"L,nKx3VmZI\&qgB*[&hp+ѕm `%FF,ΐ:^7+ꓯ+.fЋ^iIv#gY@ %tD.8$W2VC.vIo^| ˹w+D8tT&*|W:o8݈Ӎ,(ژ=`' I]=1M'턔p[%*: )4 3oj$oqDNGa8 7YjF:`rKlqoCWI-&q|jodys6zsFzJz^ nnܗH3Hi]G/5-qfBt öxDq.w菏૛=_9q!i5='~5ʚrVqLc|`Z*Ym .q2Oߧ.0$tzu|ʄYWb_:.NRwE#;*>C%vD'U>=?寏pqqzt+)g// xwe0/se<EJ56_ cĶ^"&o`_;OϠz5 g/0zWaL)]E˖?_|4^uhXH*(< D8^kT]i,~k퓶$=Z=xu+pYOV ذY3No /6#> yY^X&m>>Ve\gN%#E[bh9KTV!뻧G|Mɦ};q&am/'e1ocKF ]~IVzQ@wf`ՓJ:t^]۸ƻы%Kg gN/LsWu7[<^=e+Mj.I;fN*IXa;(Z[Myv*>2ItIj0{?iI Dz6̗l▉<Z]|1ulފs7f5r땓g#vÖYRiϼ[eK"ve(cwI98o9W;T [<4:`M5 o1k幵CFةj8&Jsq yqY?{gƭ"n3sdnn{j{QqdW^H,FIlk$8yHZ%ƺs:J۴*Dw|}faC{UM_ݜ7Kl̾|hk/4#4d6nK>wuo~_|8eo?>>={mR*狁z7f|mo~an}c]U29Kk$K G mi-U=}]ϬCeꏟ!2NnۊV(Uָ Dj)vD3NR(E݊o S 5Rkߨ S9%FtޓizƇ S]PN%~ԵkdTcukL`Vm tC gfK]~N?]M[={q y7yi>])lg} SՑU{uVV!{̰ ofǗ|G~X|=XtAOǟ9Bf ߟA ḴWQ%@d[f&&z9[p6w\ZfoNWᛷk#~Fז0a)ڷ ׮A\my{DѼԆ4v~Zh1Շ_vj% flxܢǞѵ6aNf4g_/nglmǛdKUuZbUaV}cAϋjf 12VADAn UQMZuqݨ^wӴغYwHg i5@9AMlgشm,XuAus+#b3S ;lMW%{fXn==S6FژV!:v=imC^\wljYXuUh*o5Vs*+ 8@XjdFА [Ba }0J3+|9,uTZLL*h!v46Wӻ?X!l)d1!vUBWu5:ZRR[l:}/: jkB좪5ow.ZZ9Z$3kV90@uAAf⌌GAJhW!q7ݮm[gXd_fDS7-vɱXGXGZ{lDz.y\UK|? 
`5VsPa!Jtw4ItI]&6vlx-4Iv>e`C" aПcoEejuOs~ćnyVY|n.?Bz?}O:uaqqݾ{]n?廔I>Mh?]̷Ï{G<7C뱽k^rTK%kuUʭGZ(}mƭi~j# Vo|j)*ߵdkCY)cRJf`Xyr/-۠qֲK;7s9?ͯ2t6ii6B\**]K%<8:D+BF#Ԧb[I2Ή/ɺ{/Y3hf% :tl&=%=z:{`:{ƃHlqa \1E&(WV 2#3+ "\'0ubJW)ʕ㚗>#"`:b\uE +b]MQvsZ 2Ǻb\s+uҤ.WLLW#WvϮ'(Ʒz{~\;V{?ZƑ~^%W\"Wzi \1[pQd#WL0ubʭ "Wӑ+Fm ny_l;d6wYjtic³cATéٿ o߾;[\\.mnSܽGVSNA @Z{m94L3u4p&mӢtd,H;\1׹Ѫ8Q\E+"q_`q bZ4S\i_NXgǸV"WL?bN\{tlL뒗+Z`\YIX9FEjgJE&(W΋mW ewŸ r+E\1E&(Wޣ9[Fe$ڣ{.WL\r{v;y}=qN/\#/{тiɕ!Ws^ @i3+6&ڏ^bQj\MQPs8\/ŲgWaċ?6 'k5ͼ9͸2;˿6gK^h}~sPcb6^|L;R\9cҗ+6c'{쉛uŴ&ybt"\XWL{상)u5IR{\1ǁK2WٻbZuŔPȕڳՉszDd߬R=Jzn7h0#oEvL(+r5AR UI{~WpEmrŔ\ZR_n1EP.WD\-"W/"Wۜ`j\1K)}q:\Y-TVrE>v•cGL]<>mh7:7 (kfkξsIV:]-]/ua}7_׃:]~o4~8o?Ѡ5f@.U<_+ݻɿmu =O]:\x5ټAJ''jy}{łBk\iٴ%ؠ$BҮǽ0`'0HH%bPϱ`Yg`{Nz&Gw<%}ֹViT6&nq`M!i|~r/ gS_ח|z>j0SĽصf#Fj{n6_'/d#nkKeo/,@ED213p@2|}f; # q ɑPO}>wx^AZem{z=^d؝ؒCIilMHΪtN݅] +f=G'h//gk$gJ|%Z]..j .%gzEZ7ycbkQvuRli:D!;z]Fx|[#IM|6oSid|HE-iQ1'v!g5}0 ԜIz̪j35Ժg9ՐJJAu@r=꜌%x w:>Ü|XMѵ5b',"8:AOkc0m€ڈҒ<'Xr!xQU-yKBBC+QUbKT)>$rM NԭhcfC)$Xg dGhOM6duGގ`Fڨ/3ޅ*)ES |FnQx5Wn< umV EeaD)Qn첫+qA[,<[]:A8քFWXLĭd BfCc:vL\s \/ *Ae&*HJƬC6笆sBE(#{*%*Ze@Pɔ2> [tVنp5TM`Xddb!]AА$8IqN +Qn\4 ~**өPm5$$I3i0-V(!ȮhYc@+!7CA 2nP(SP|P BHPED&T4g"!n2֜ANŜY' sG !.A D7)ȗGC%:c TO 4XƩ#3ص~.n+B4TCA̬%RI9n,Y{yGD)J6NjA!N) r{&ETݳ.RVKA=#Y̼Bj\՛F_ʔAIP 6A Z"b C$uWH&мGwU+czh2&M ԙywA[t{׽8V:E$' 7**WeH;L'= N6`lb~;/,].Dz2=hzU ^n]-m/ۺ L}6=Dfxs:p4(OM:V%S+:XF SQd4X(hV(39ByjN$\Pd"UB5 僷0#i1FXi(^BdW%7#4'zu+׸ l~ _6luѮ9׍$xzM`0uƝMN,4zR6\`-> ΝTɌ4QG[S"S)DmzI(ΓFCkBm1y( = o=̈=Xuk80)QC^":$b樇 tyB1C[u9vI,WRcLw[zP댂0dJ 3 A)wn˨zƮ0XB|;"$6b]f ᚛,;0RDyŰ*aRw(eQARnĨF1 u01x:yT,~ xCڨlJ%cT-،znzgm͊ڸɨfzPiU{/A64X*8#TK.dP?mN^Oy~5k7!|ƍ7 ;[*>Y3*8LU>:jUàef@ 2q_HmfhJ7a#U2'YkO^ (TcJ ]\1d@b~pQi8 sfTS.\;mKC1ˡ옕Z" %;)jI-, pnVq޷˳3O7_n>/mgC~ht~6nzYl0~i 8&u*, w>%_᪀huGcx@KGh=(h2RV@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N'V="'8ר>VCw 7H@O d6$N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'uj|LN @\ l dE81@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N'Ba:&'Gs<ї-ݯCGl[.V3Ճ*=PzieL q\R__NaVճj9+׍}wm_ l#Pm`ok4I3VT=ܸA=#۲%{l?5|$xyHB"5q* ;Ta .d]%yt%H:Ethx<~pekZxJ(vVqGWOBW<+ZDWXL1$LZINWihGWϐ$'U3 ^K/Ce P|t8G`ڞUB+OW HW?io+,P{Ʈ\BW -eMRuf_]^:<])"/nZy j3(+} h]oʺ\BW -UMut0jT{3+ m9J츔wEVxXl"}rSɊZ{1/rye2RB\65 w݋O{AxI+ˡ?_;̰7;DևG{Ct"˜F ݟg k'H6O O&>ת7I_v^Ɛ ~[":d]mCbIׂVyuNn6&c(ł hb[Ei$ )ťS$h0qπ4/9hl!x9 $0]ԐT'cRj<[D+ @P.k%)$lp\v)SAcS`lV =XXA+)JfkG1ʇ]) ŮefU;KNtOnu1_e,Y8!s3;g;AG@ݪ6)=0T`eN6*Zl><To_ƮYTTW'4X>c~UZm-ϔyOOAP/D7(w/MZY^ДfT:?Xh ?=L~,/<Вc?Y.h2ZܠxQ5IAR07!(Ye\E:7(a^;7wgL)NPMw 0LkF-^%Re ЦM.>{PV=f앤2SG|f)TsV6+6̎\/θk%ig1tf L3!v#y˴󖟠[+"9#&"GZDd oeJK+!b-F,B0# )V:1тFQ4`G |kks;t3k\]Nj/WGx: ;/lnwW]M5ߙY.N4e8[1?S hTBUT  ɬ䤍YM)S[TpޚZ5ịUhZleF͝QFG`#*$;A (UtԬ[o=M&e^kekOWtaȚ<881ȼԂ}?#δA$LYT.%r)+ȸ#y/|Gu45c8-y_jàv?)l2jg6/+2VHߓf'8؍Db$Ƒ!4S-FGv.#gǢNIF7ѽҏlAU=o#5{1,'f??>.cK".8),3tZ08QZj#!c:XTPEQ!r)1tpb "Ü5,M^sd +t&hu VÕb4wxӒ4^JNntb΂wDðqunb[o;sig|>Kޝޭɇ͹)n&ivDp7+_pK[3>l6a!u+*(KI@S#IVL1[c#zg2[zRO<_4 s-J'Q,GʁZY Ԁ"'cZ'ƍN:utq9Z\JΓ V)㢗rB)/deF/Gpt4DJ6Xoӆ(}pH]2n0 k eYl>qŗ>^E# FV}cQItǨ(J![댷{x>cfgñY1ӵS8>hR\>l27,}VD\P{ñ-}51ifpdp4\\7驹x?B>"%80IwWKY59?\ L:z= nt2-惷k?V3G'b&'G)@1F?^|VEѷIw^X9hC?t:]xW'>Ď\L`}臏E0۫T8x<1i Oq8Ȍ]#g/73VLFCw18{0˴yI'K;\B|}Z櫦B{G^AN~4gE__CP]ޯ&ܯNʷ$U˷7Tx ,_WRkӧ;U]5*{]\GTA$ubu_S$CW=}%Ab3ՊKX!qG0a[L_P5R.q~y}SQߍ^U\&Vanŵ5#h+ߔHWaы.A_o[?&w4v|,v>3\0/ L-揌)>2ҝނt/+w乙J[ %@eGNfgTPn^/,#3HwS27 1Q<.-/Z ,zN*{QhE7sјV='Ƿw<,[k:hd("Yo$%4CS=8k>э28>oXj a:_L /J"B[#v$0x`BI2N*% ا;Ej!$.~a>_&h*9:>((F\W xR@1WYWCC,x(ڪ3M\Ȥ!rzz\-tW #Ѹ²9N l+g Aި@M4q]i4OU`|c` FjFEM{ I*N/Tj H8Mog{HU;w [΃@,LڐP%n mF,j'B:K0!S&J;f R@LCkYͺR^7A:Mi0|Sʸ[Oc:[%0.бp$bR%$-nZ @Mc^^P(t YIH@Yuz8 XEdP.!#h55HfZ~_=}oFo 
4HHAÁ]vFas!1=vۥFAZ?u-SO-5\_[cR;ĬW9jRz\Tknm#G * w/,,qdd;9~EIVlDzddj]?"L=n}H_~+HdY^x.e!@|L'2OrY8<w^}azia;i9MdHfz׀n-_m[8%Fg;"!-h#C{J>/~S\p{tF[R eh]TJpw4⚒qwqZE\٢R)dVikB6Z Q{2r A[j4!kD6k~Y\l8cD9)r ֨ΛPݦKw]~L'/npӭ=~kq}nvS:,F{|a 1#^=^sMjUx82x ;YHh >k.,D0` T'A;P( W9Ƀ H)8wիn_/amhb:}Rš$%CFcC Q2,utrV/Ǘr:O~*ws [@Hv?EnU4{@⶚8>l}CbLU6:6:x̱sϜwQJ@Ƣ/c[bmpjS3GT,`l,Wc^c6jKTv0 43 &%e &%FQmt >F**9%)sLU 6RΔҢio%YAM 2*dFXj)^IT;JX Zʦ}(CYʙ7]96\QK|V咍լMŤTb*%iN]$}Ucv E+֋{M@ًKuoK̰X{dyf\;œW[V?Q[1ygr&*Xw?[!?k j, /ýO??hfe>_6DhV*,92v K8,lLg-֯\{jK>Xf?=6Q=w6] 9ˇ~,?ܔ2ɱ/%H][c}_?M~Y~ :YՑ_ϮNN7yQ7G'˫t4?&寓n,;T1{{{{ſq"8,v=Ջdݿ_x(։AL8\.j]ՉyA!xC"2і`01m.eL2Íc)R{r6־K9վnpcxЅ7HuӫKWe,^bB$PءEŃ6#z d\E/S\$Vg a-(0 j.J*@aﺬ7sPz Ѯ=vSk m7Nnr~sΈc톥FH㈎BӋ&>!s)Ϧ({17"0*8Tb #$z$dzWHCwGP![*.rhq* |YT).xzӷ !Lq2UVJu T WP{!uvHt o_=PPPNp>>}>^|2̈́SՃkU(c52swѳȣQ[4s$(URP%AtYߚ9#\mǑRb)VCt(hSUW 5X9*޳靜m:;&g/mbg[N"vd=߼ wpŭm&5)|qΆWnv˩W^xS_\.)Om#]yNm-Rև؀v4Q!?67Ro6zX~8-']~ʏ? ed#BVҺi{_'_%p[##9x5ė{Eϧu"/m˯n9qV=h] L6<- s=vA!O_Yߎ@}7f3ner>,9 B- :X}Oa_ɰZ?B 4״ojCڟ֭kGRkձ6KBײ+'^@nr,EkE9566i+Xd\"r%y|Pi&sC0鿽( 7o;5['NCl^`+ԋ$̱aKalw~؈%j.E?'Fq \}elUt1!*SZ3BҺ]_<{D]RvU-UvTdr8u`ڳE|`-o- SB! $.SE"ZՒe.;ߌz(Z`.A=zED2 ^)M\L#3zU A.%xe*`xT#0*T'縀:Yg LﳗOT9|4okU6F2x1оP"SrQ@G:@O~+2;A*:U[Nk)A$‡U*fO#'qkkسI)0X[ 5_@%j[UWʘI h8y86doGi_]tlsJ:N>+\يXY4(^*Ƶ̞+"2YE(_7yQg阿zԐ"dKu޹W4\!Z'Q=KaA;Is#t֎=.^͛cMls C%.xM@%$$&J6qڒd8m YeX'@l9: A-A`z.iL@y nǑb k2AsCPt!(5UT) ޵ q [vkiN v '7;gWtP"~ 䉀Lߩ8[+!DU\@l+g+ߒu`HgWe۾iIt#xWjBOY̌4FЦ@^0шE$Msoov{>.I.gL˒d${XYw-qR$﫸3/!Fgܓ*n2\bhMUv???rǫ(xUTW>(T+(%XS rd3+`629+V)SΕ)Er"1*h[8՘9I!ࣷ"J_ILWKK\+ӑGKsN5~@ɪm{Jmm)I0!_rc'?n2F#6tz97R^L8(jht{-4m`ƸG v{w|?gKE@р &N+#)汙HwX=xecͥQsDd1]C:51.t .匾Tu6*$!{^l4*֞ŒQԛ_tGubŊ7A7wz={ݱ!tJw5F67}=mXK"#n5 K!gol `(KFEyW\b|C]![Gc;ŕc([шYlˏe8_$p'ۘmԃ:`Ra`aNh:N`=!qx _p-˱Bٔ v̐c8dEtVD2a3ZB i7,YJL,[> +eҪeBOR5I|B.'ߖ};/^}aziaߛ~-qO ؛w3ft;ʵcَww e o-ҷ6얢mlaUq=ƼlAUΒG:yCESQ XQ4Xa#y(w]#" GA&ĜT qjX'BeXO+%Y*ʙ` }䡛M1qUx%ٕŃgYϥ@@.Z J^gٓDglC+GTYTph@ \IK4INX BzRgE6~a&3Ce79yfmRC`܏TȯrUa3: T.`YoTN/@NTɈsggskΕYoG+h#$Jd}M%S}N;Z[UTom:-c?vBl a{wx/2ްý<#<]7?!:;_q]YyR+jáF -JNX+sVP@41at]-6TЂ=A $f ~?{6eʵffMR3tlz]~t O[YR;T^D?d[6m1!{ +D{C'.DPTÌN'fb&p]Sf=k썻,7i]i154(+6<ŌBF":ԲQ>ɋY1 4C,t4H$bBGc}RAy6v>,Y"ƶ0bcq_F 3"gC;C'Ѝ1rT8ENϮA$QPIB,'D$.Khm8XG!1gOK&A% *fH\rpksMm\mlLJˋ .Şowб W"7AYoa u+mL.+oPy1x5sSq_>l@.4?78o9<_bS8/87)( ';8ޏ[qG$3qG\Jw$eqGz>ȋ;"‚v`:CWiɮUBKp*HWR!̻]%;Q.A+tbvJ(I]DRr:DW*yUKUW*tP +%Ctk UB[?wP7DWM/ "fFWj7>/]ǂmtJpJ@W6=E:DW 0睡WUB+u*ԨHWcY[ y_c9Q.>.zT?Mqp(K8+ 6@|#77tQ&PchaQ4xNSyTI!gtBZO ʗHK UDW*tPJ +vhW?nh1o;]%HWat*YbHpYgZzc0|ˡ+1KJp ]ZZ]%Ջ+$i7n`sgStLZD*ĢHW#{JW 0E ]%fJ7CWMdz~j7gDZLn(E˦tzoc,XňvڽJ{ztEzZo!Jp1 ]%ʚ +umkr1hAn&N?N:xM"26cc`$\9y݇w?{acOT0?2Ǔip]ؓ>e94 E08>\g/dpg{Xa}Tʓy$>mC R65OL|_Y'.;F|$_u*`iED([5H)#Y̘C:,b]+zP+nʻs庑;kUE>Ӓ)E ?iYF~F0p[ZQF)3ZdZVeҲwE7UZlpjNNiƘיre:KJzl@%z! 
.b|ԻLfr'L-PBmZH(u?-ҵ`M:CWp[vCi*$=]DDrޥ,:pUBK[]%LtJPDt(]%;^| .:sB+Q*T:K+&ŗhW s# -o_BٶzzBբǩ V:x03<%,ɫ[5/ZѺ8_1N3atC&S 1E Xy!t3 sGX.0яŭy"OZ AW{JF $k0A-S&3Vs-AȝRSj~THT4/[;ٺJީP-՛uC_$w%AYw#C:rSc X|:= kM7Qk:uB%>ax9iL$?,N@!{:> ?F%yd/{aafsr7?{&3ی< seE.neI ukI)>̃57~J[-ۄqY"H,ӌ@H''R?i+OCHΈS:z83 ',HQ;mpMK* `R&R/5eDDOA #($#Z3 ],45.ͧϿF|tVZ좰$Y]M^WC;u>3IZeyS=8 PF%d@XE1ZJFNȜ4˔5`1VYE 62(#0R:Ut4܃s{V|:]-^gWo=`=n) ,ɲ-$-ǢGmo ؑq "ed:r)Q&HYIƽ{{T; aakbhq2XN UI 8?.?b[E Ojv7jMtJ=b$xAj`RLmH) 8J(p@R(USnV`Ř퉧'3Ϸu[JA׬# FV}cQItŨ(J![땷{xb槣Y1ӵjt\\ZmNr`{t:MxAU{g*yv2:>̙G2\~[HgeR:0﮼zʻ8:.{>L:"o ##:wgqK|u13;dKm%BMF+9n] 4h:_kL RgVMoSb/"~A4bgl$ _@>c닙͆˒T5hӮKgGp`Ƌ7)yGK;x}5.C{SGQAo~0y_AP@WF ۣ3z<&?zVM;oʫ+K2 V`vOv-3bV|RBUNS@e_h$,oUh5䇵sr})eT*g ^~yؿ;|\/OY(u2,ק]N`^>"fuO۔sFE)ZUI|ɬ@3͊+ k6q@P>ޔHWnUЅ_U_y:.N47Iw@S-= `}cpv9,&%?8!Ui%ZȘRo(!TdDyYK}{lߒ4WҕܸU(>2$Pv2;d~4T:-A[ƆY$sylox<^Xd4;uR3&f+٘T59Ay UE)!GpL1D(q@$R`*T3뗪F | sѪfDtI ;S%.Ff%:/˱8nUxz`z36INi[E&Znb?D96\~,Z]znCtSMZ)t\EH!zWuh\:1kcT䭁%(aBq$%m8{vLrD Ak*k 8'6SR$|N"`S~j ݄PWz+E|f%Ɗxu`2&*VHC T]!Qt; 踤|]-KK?·c1m rU*']19Čb@ʚ;綏x}WݪMhmZlIM{[W/3yħy*rk-F%@+-Īk- $CNd&"Gr{'`󔾘QEZ);sOyJGp_f*푘`M)YD*Ud8x:S+,d}t(l!m|_xц*:yۇ/;gYxER9g\ye&XoCWAC׿/~]6P=df}Yˍ}Fͦ{p8mt#M@)U`՜Y W}jvրcα(AW ^;gm,&9mCQRcȉeՈ5X4bF`]Y#G_5JT +GgokMnC8`0 7ݹ.woŕl#~%߶*tMÞ:$ ^b#`>O ^ ^.'^b2+<\b *_{ B2k"%@6؁7Z#k wjȊ};},MjV9u[Ue FY s`9'|b0ا:(M$ `ZK 9C-fgFq]HD A $ujEջ@]ʗ+\:/t5ݯ:D]⿧n[ubר[aæjѦ@GRʨ Jmxmx\\Nc(WܖJ/"䢷:r$y  M ;v֫T|D=:K:$Is%W2^Y VWrXNI sG#DcH @eO6'4Z-*=aպZ,y;H7q@4UB!2DzYGMh('YZ3FڦCXa,S̻yk`_sUx5粢 +`@\)3gmkhoЇPԯABadxUEn D@RIaLmILWJY+-s^@vHDC5l 182I’y%ˋSdoլÍLeSo޲z65=Ýk&J'HJʢ q7qnҢ:v&eW?W\M~08$ȡu} @bB1 :V){Nw(I$Ogg<΋DLv-;$bLl*DE>ȶV^WZdB*tMk,"{eq} Ѯq=vldډ/\wҭj%&x7-ӭn@9N+sN"}xttzyg>3y.|5hHJdUoY =vPHd CwA@"R (o箊@tl`]T1@.mH9mu)ۘ8VB.5鈈 LAڻ 8!Ucl7z^ ='J'C39ap}[>^E31\7JAA;#.'s Z4sGOu#(WL@Noc\8Ger(HQ)YM] {_[u"jqsbTH#sTrMUPEJ)ٳ|A_Vp:nqe:&?)R'?)姟Nv3*+`Smok>nn̯>̯ͭ,~hӢ4WU*6Y`8b!]5z~q{OgI}l퇓!Ŝ:Y>bvYv~MS qGq y ㈣ymkqq~_?^cDLY;m8yQ3⼫+)F!e8Z}5zmV@s. L.N||s}gT `3&2/2}v}i1;D+riw ?_3װ/ϛnC7$+cKBnm0&2kUA٩ɵEr ggo<|ElO9^Pa_vÕ?˺=|puqz>>:/~m]WWw\x700ஷUsuSmi@ܺ|K.ӡȄ Dա抦l+eD \rE+eGKPVCJ0 9MqCu ^"V74o0s5^+Sm5ԥl![y/M7=_Md'*'VF|ޖ QѢ.QP`S%.lbNXz+W#נÇJВڽCmv az0q&~ Nua J'qG+SJ$3@6&y5.[&ч飛֭d*hFU09R KƪQg t2ܨYk}y1͹ں+ydkE8F Sϝ `xzW5*NB&崊;m`|;"R[9|B)~ci[` Tr9c"j%.+Y43Zbd],wx  d"i%kd@Ph*YbPBJ#B%U EfЊ2L4!DF٘Vd=U)vM=lߏON9]|ܦrh!hoT1Fࡢ zP OpVj-()gYN&nߋ/=Wgφ`TBp*aѵaJQQ~yQ?h 8AnP+_Mq'oCMʑs>f+QpD+)WAiph{ıw?FP} 7x7^n{ERX/&gkZ$C *Fp9b L,DYxG!ܗb/󧑡>%W }ϩ\N=iNj5G_]F4a*}lS=$('+NE⾐E9E9J!Dt(OtP\;3{;ҋhfL+7ca mvUZotp&J$q&t )8Q?hdL7ټ%#%$ )rQJJoY=gGpVlML:}>))1BM]?W߱"l0̽O۫=\X4ظM@֤keLHԼם5sP&µrdWXX %7El+LqR pUEb6C5pxLt K&)M0IīyJ=&/ngo^/8_~w}}&ɲ%5Tm>p4ZѠs0 r0'-@9o]w2ry7cjCSv䷦f>CؒS|J ha%vSQGW'9|/dQ?Yz+WyR;_|-v1JگgY G;=q֡ޟ13ne<%Asapiȱ,tyjtC^?oVrlNkjL;S'<V/jur<,d7a?hOH<琜C|y5c_9[/iB̑6@N hzȕ-|v5_W#w\FjW)M,XuzUWe\$dRq&e0 ƾce`_ԻXށ.cX] z/n 6ŦkBΆ@hsn,ݞfuz"QiY-*g[Fh5W×(n*#b6}:`C"TF] $Β Uiֽ[6b5dv6W&dnO,ՒΆM-?^L`)ݝ{dNJˁ*.gq? F.u[dDOrMl]7 B)!aIު9xgQVgޡvFM_^>OXK)JڈL) xmAz{n<anf /ޚ+~ѐ񆂜-Ga/_懿>ao?ǮhJQH ڤPJeA'[bDZbY|q wؖly -4ق_CA V! 8{촘!ҎC0@FB,PW˷mlPBU&-Z4eEYܭeڄIGVޙ\=ܠO9Q/y}ٯW>Z,//n;:=]"/ܡokޡxQUTBIٖX^pN 2 rj DI'15٠2TflP *Ae6٠2TfoP *Ae6٠2TflP =٠2Tf#*Ae6lP *Ae6٠2;*3qQfeZn?qUɓSJHfDYv )RM쓯}):?)I͵tSJ_w*tU@ ,`)QbU[٠%r` Ut fOsy5ybkE8WA4 AkLDV Թfh7q(tz@5C۝"\lMdMyʣY\e%WmߔO3ۀ7iTnHqE NK@nN,4,,)l.kk(E3#{U95,NjgN,EV)\% (O$25YҔRRXljgQhXҢȬ Z;*SC9UbF>U1vM-leW*–1h\AcUD `5Vk)rQ#Ka=MÏz ? 
?H(Iȉu(\ДR Z}rj"yZy7<8PTWzѢ:.ޕ eX= oe̢kjCv{E!?hA^2'#ͣ,;e n1\)3|]At*j#|4ImFe'ʰ<.ŧ;oeeG/Gz.j~~@հC;R\&ZinpN1$'D$/!D],'Z)ShV>Dv&aj-=B19z/Νg)c8B< s$Oh.Bۨ(M RkݾY,h2BBS~玪ķ sͽ ifSQP}2,?0YEG̷oA5_RY ƳXE0BihEHIiȟqXk,QRzLJXQ8+U(^ #?b8P"G5lzwGGOF5m7ϣ%k&3QMI35zLVJLT${ w[Bh_#e "2,xX3(N!0FKH* !"_ BC=@QV7#4 ١M XG 9 GOI%D@ !Gq`9wF:S1 !=Jm=wC@2+jP!-@x=]~t4r3ws9tϊ7'\`UwEx5~Xs0|^OWYl8>ΊELJeF^|{)iX'~ީ]0n 'Fڭw;<)~UHѸ`п8৴[+Hwv\N5p^ \G[z0I]\C Fw3=oa\.PC4^}2f 77hZ[sUr0L/Wes$HlnrxսA}YUqe\c:Uo*Yr7SsRE-FQ,zRt~en.#ʠG?zrqvPXZ@UbcLupsdF]q7{֚)G N:KJ(&'XbrXcBukrkIggľ%1Li# ʷr4nur5f'jRwXMJ@@LsG)roU<,i6%*_CL`4880U\ ,/e*Ϣܝ(~s5sLAc҉UkTvsrgez75bY r#EL̤\+9sgf _TLPc,ϗdѰf.LnQ6Cm: Q6@lz%t$OiZ jTtj x"ʙR&+%yr C"`Yɽb 5Kl~qIMTLB&-FXTD6 X)ϨX: U6TFz"oӹriGzQ [ݑ !kj*ڀy$ kZ'!Pͺ&ԵUP9E2+t/rtӭFFP,'gR*ML$"(~qO%f'1%ECE8H[j#f$OvfG$np͑=мuM&h&J]Ҭ&߮-jvwE+R//upOr|ogoӵ>ah lgo ܽ']1;Ysι{Ns)I e{.ɽ[/Itn%jr`|,; 8E e$?&5l bb4 +8IX0Fjvm,7OWם,7r&UZ#l V*!L+B2QDN]Q>qH2POQfq%[BX+6l-6T k@0RD700r9$*&H`k;I%W tkAF#nBцnA-+f*K@J{!D[IA; ۤ=J2 w$gTkv ~fINzR_' (,h=^-nS^XZIIS _˻56SWWo~}Ǝj^јXXbYߗ/5:AlhGn ( MWi=>x'7m m g_{Xœ4 pNkǢErkנYnpLIoCuPC굢C/TzO|qb^Թ')ݧ$g`gJPc CI/|V}42Ymxٍ5T(FyI͹\iA[v?i`'Mv?l&ʚ1a'{h`'B)CT+K 6[k-u9I_$mTHP|P18j$(wGMCM$6j!/mVAWovnٛ Vf`/4bSvSgW#rʾ̿: 9&B^UC}H&#dX^"Mݏr9.mD) W hB4]9(6> <* Ӯ *u]L뒍h!K{qǰ7yȃyε UAzE~#ݻ9݆ e͎=S 8C RъI*/kCIT֋w"B+w7Qk_ %@{rٞh\hC+38-k6*P?݁*a u3+lٸ_gW;&EՂBo<#vdBqx]V#ϳFg<y^ ?re%8"p6!=4-G*HRx%w-s|>.jj&mt\=FUqY6[V~k÷VeP0XCyt'!Y!!xb}2C)Р4i'uTC"7!֘na|75 l+"yL͍VHrՒXdBiGfؾбC{HZCJ8!yʖ.CC0=o>k5]T_5>WhߖX>7VV__{+)Kzvow??nκ?''c4}'?GQ}zyu@כ9YSfNoI&jT Yb);q /?"̿\\ȣ~Ӧ9nqN %mx̅ix$cC~n9F줣rX[_zm]I>q2m)I:or+7L^3F% `f7ln{}a g)^5nuޕmٿ7Zy]ڿ܎pm3w4 YsXoS6A֚zI h [xyKU,WHqNWTX5AeJ]ynI!HɑfݍXîux:fax\ܾvJ͂^6S*{.0E{"^<>(j ݤ>dK;"emO[=v.gGσG3؃^+E2+X5?3ᓷ:JNhT@P%FE+E th8N.M]L`x>޺ϲ,m,m[zDAE_@#@ƆT5"QmwشulujL++͊nyDjUk_f*U%(&T,cLqJesLHd(KyR9\qVTbІ& 6}U^SCP`C)P)P˂ VJeJPXNLSele9/ߘn S@u-].>?_#z;?IG~ush(\Hkq;YV>]L텠;ݵ8Ḓcy;Wk[wB-n IN+zZ=^HΙ'9 Jy,A9(ʸTٲʊh䊡B_HT@g9d_3Mj ĐڋxhI+IB4+0T%iflM `l1ُ˲W_u3i.swwap1*4qyrъ D/ xKH6テA R:̋mm[ӗݷe-YZ2ի ٻH|qA)]:;Iŷvty}Zi͞_IM; z%!M[\$; E)C5V129qkFZUac;D;$8gcEPk\ClLzY=câяts╇ ;LD{O[Wǐ QsP@ MA! a gsO{|7|C6+6O`.'_ /$/8Fׇ $")ɘ>^X}ej[L%2v]Q+unu5z̬wz̬wf L lN2 XA)74mXyk j[78"$٬}_ԋU˷͞5#͏g׈+mfa虘1@W'y8ɘ#WqmCJlRjFiF=@D;ywawW)&&%Ao1=!,ۢ>T7e[Cj&d`L=OևajFggg~zrc 2`pzIn7Ʊ<9GQG>d3ͬ6\Y <1a./QW`?Me5`GXt Ryz`'MMߧs*epR;לnIs#]mk1gT\.OOurқV_J­Mi uG\0!ڛSҋ91t_n]vKev#; 4z^[@ u?92f"pa1LoaC #1q?O媁I )*s޲½N> );tsu¸rH*l|%uyX;-Væk饺_p'-r}hWI]ؿO@O voZͭ#iv6-AB26ɉAΎ"ܘ:Mh_ʕJcM0^}>[CmF6쪗ܖN >1?wv'$%r{UJ`^%d0֎w>Fh0e~pۂE?.Pim-d/OZ LUCkwN:զxFhFB;4R/R˯fъsبIE%Ņ+/{\`3 =\W :D 26 0韃g+rryD*rD Dz6&R?!CS79j~3<|>q9WX&.8֪,R+|Ŧ>! ^bT]wtqq߷wWO Űb(>-@\ I?ɪib48wk8nn`7e\˳/o^J$^Fow??VQS;\͟2y?:F@S*'?GW ~9;;xPs4kG~3"fcQǬ^/si<5Gvsq!Ojzi 7K34^'u|W>´O=Hf;Rֲ!n?=_| maWub5~ŶDb|.QIӫ~qcʐِ#RR׮t%}-E1G/9sXvbG92u :&Rh$:# ݷ/΄iĈalhn>sVF+o;+uwb0t 3WyPcNjJl.gddoOF7onu"ffNC0#!\{'#ARW jZ2џ6r (:9Whlt{ h$OxlH¥[Sԁ V`>0&2iT۵ׯ=Npn1d \l1 368]Έe+r].nY,lJK(R| y@g#}_T2<0t:s\eve״ÃXuW7) RBWi EheE&unnS5ޙRssz^)N'&>eqi.=w> !$jHe" c'CDW"ۯ+6Jc6=kO wXԍkw>:x6۝^|9-o0uxd~r[?zspvP?:?yA۾Zk5ߓnm$ 1Bq}wuW׵-GR* }Ϸ <_`)7OmiRK%ҘT]/Ėqw S”,%QSmeD`6~,6(<?'ZL  7^:lQ鮽Nmi_OHv+X']w/a_N{Xi,b%؃}qot~|yݻ A>\pn6M ;jÒ7B\p,jOO;>o`sx;LBƧwh]YA9nt?~ '<:oڭvW9~Bi6_Cui4!mӾJa9oo@} \]t-yڃ[I l-1t@(36|$TnAV[|x`e]oY¾5򽌇B^&! 
aVcwbX.- XG_A%.o89hY~n\]S_׮|HFvIuJ7ʘ9PK%,OQQE9I5j_'Miw1/%jʯb 9ft,WjhG~?b1?'@=> B/$X#7FX,/c~,^Iв0bM`Fir]^lpIohI7۔)krɤL]`AOGxbyDQ}(bzEq=~r?>5xҳcBd3ZQ( |Bd.FتQ \P5c \N2(D>fܽ˫lz|!2Vn lM^5ӖaŖe/^ҔcR矋͂)v[*R+1E^u͊2w)tMM]LehvH}EpFUTYeQ/8\^}_¿sl_4Oܳ<9nlnTGQ{F"_+xP6^H,rJF퍓'>?=c:-_G!:MFuK51ydq16@{l1Ub,D`bnG}jh+mjݎ^_mFkP^Vm;t>xp3'eG.znr4LhsM-ehk7kgǩq->oh2L:)LsyCqiNFJ*ܡR ~'~KͶ3ͭ_bWf;7:wVq~@uOe5ͦ5Y[o%έĚZ,֓w{QѺPlweԝa)6?J^ZH_'_hQ_/ŋ~_/ŏ_R2a0 F_UT Tzr]K# u~+FQbY C\1j+-(aLzr .ZD`9,FZsd^ I<^@au Db.ΈNN2Hqaݽ*jZ<<;9{s~~gtӽB;M{wNbÍ|w{c6mՋUom:~ip_;'oNϏ? ٰ׽13/7g';Ge^e|~Ϗ2}f}G!`=m3&iR_S{;;p]~z9;8v!׏'ӠABe~۝Nh; P]jUa5ov?흜9<{w!G }2@.%mxj-u#ܫ]_~Mګz #Mϧ!E9>5 o1NMl OUoeԂ$^DNYͪu1_̹JU&)ZxUi$@sg0Mad-%uSȆZb͌GbJ'xh*dhn$3 4ZJ)!烰D`bs_j8^0h"`ՈC T4a S2冱Q!97kB qg,Fq 1hX1Zc'/rJJA|+f"8OcSx )&7Ѫ>uAj*hRI`. V,FU"Dc0BUYr$Ox6 .|ߪyDCc UE]rzm_TqPpe"4Q }(#+TĢ C Tּ o4f]o8W 9\70L~E@RTnJ?nq%;m#Eu]}3N]&qkpYa`cy:pRFXqn3Z*T.A,l=Z[DKO-H 1V[]ű ބ l7D)^y@_~s:X~Kì6OR@~/۷]B<<{}lLd#yyz n !""p~/7c0Dl<]6x$\|@D(GaΧӯ5bXR^'IB!l9PN>aZ% Z+c @BR a~ W}0e w?;ՙ>^5'Ez{_vvzA$?a?}[NI}^Cn#^O,PJR*yny_<0zHSRpW7{T}jq^‰u47B^OowYFLS8DC⿄u(-䘛8(M&q^Gsò.5EaI JS]-B9+AeJ6c4/emvͭ<}"Q܉L,oB:i $-gf$G4/GTYliD^bșFT3/˧E颰(AVNC#|\Ҽ+Ai0BSPB 3O*]p*n<ЃFs%fڭˆlڜ%7K6 Y`i~W! ^hmUOx遞4ɮ| ['|ya/c;;!3 gdp#I#&<%HLbp`?9[8SeO^':#|pqəY˧yrؗ)#gt6Kz <5@Sg_r3Pj7IQL2h_,gcs8+Ch1bAvtb/]u0)ǃ3sAayG~rzF`7ٽ6üg!K\_2Pӕ]1َXyQ7N|=xy٬p1Vh\vBMPfxl_gqf*gqȻ_w n7uba<\hVs e֧<I M{2z2Ќ~Gty^)= /|V_8f6JrD֙~] #Id׫LƴЈ`޼&A_[t(eٷى:![bDLYSc?yCo~ !o Ȍf ZQiM-f{R >Bsh*VZy++)U {$MSEx4Y@<`L#d|5hK,ak4ifA݈n{ԺGȢ'%NbHI(`bb K-B ۘbshQ;pF8Y?m,^oin7Ҹ9̃-(o־`̸MddIG͎YqbsidDH6LUdXMbf5 av4t1Gcsp3  2X+-M#KݪL<3ęn^}vK0f3_? -bIq(A>Җ#Ɣi|%aHSTc'I`@V?%q$Ҙ /h &DX/ *xYzhX謔b3 +crb'߼kC FRէϟ%8J?QL )"L XHeF0% 1:k$Ja7ф.UˇE AM8Hx3z5s 0h-ʻq#VGBT  a7>>sgUjuJL,j7 vSt5rJWA0R_83y)NKDAi8)a~5jH~d 'Xc ~MT̀3H!GiXcKމ@ 6KBX,7RXb fbA&(sj`q{K5r>KbpgX[9Jo}av4~ T B )RHbǪkʣ\Zufj=m<*cY9O_?"β%#PO=Il" H,bi,U1}l1B jUzX'E'Zbei4% ے^t~Hzabֽ"Sb|j'7j@}u+YZ ϗyxGnjDxXX#b L82R'mq4;ʐƶv _ J- k&,?<@G`v$D$a?@P<ˊ  %XEɋ8m,uAHpFZLJG1D#TsbeE'5x(rPgdݮHQ*aHaN+m%<lb%P mNLCV&j-WhP!;8^OJϠZ)JN^ɗʇ ̡UP3%! @j\l&JNB͚K9| =HEQoVi=aP: -y'!W\792I叏e7Q/b1c%‘ÅfSj0%h+|Db$̍r}$8j4hu)Ql2<"gwi4p`ѧ+AD@o(2+ȟ3_#R.ȇX, 5QxtjmH\,f`*7`R1k@y8+R!3.{lhcЁ/ k {ˇNc֣>leGkxɯ2zoy 1_Wf&3vT:x?N/Ŷt^ڳ%ߞ>v܁ 7qwɟd|Vu)|0韞'3xD 'a;t~G+M!r Rz6Nγ?O^~OO||KM6Xw k߼ʒe߸<~u)Oߝ>n t+G"#߿gyƒ/ٟO?v^& F`KW7K`O7?eM'w+2#4(|)a?4hWlrNvg .M[ Ch}:ɾ1d|ee^PL`6v]`-zͱ rkxZٺrzkeGtab,~Nr0z1W9}8-?Ai T fVo<3W& ̼onZoOè?}-|2蝳9/^Fן^]jخGOo1BH萾7xz<- (s6cpqՋij9z3mOWٻ4cP7P"M0t|y`ӫw 4?'!,Oi.Ml[Δ0} [M\m~T=zE-r v=^XJ$qOSp4G^SEEBG)%.nkꁕ2$⮷@"%gbk+Έͨx`DSRboPBZBzl7"K5g0J*` {,mHq!<B _B]KQAD -HlyP@r)^=$ϐ)r} ,| (а [_8.ח4hXT9r&U%&-ԪjTT1XZ'uA |K)0.0$@xkJ޾ FC%*, 1t}]!\QzǺ減T-vuLqF1XeGhB(tm/e#D,]AM6&n={ߵ,'$J)gI]:c^d/I(fj$WL6/0F\O`_89@"ʎgDK$*DŒT8,n^eD'Qe-,>ދٴ- -e\ydMI Ti!k8q}cXK-.x$xcI82S]D/z4?W:\Qϕ\=QQHx9B UR5"Ew~nq (%uw{o^²N&fx0tjtdloVN^9yEE99C:(Sd1/{4NP\KZR:al Jv~ #K?<hgU;wd@ONTϓh,3{ f[#Q,hjhex,s 2IkXkjuYiE;m! @ OG+a<AZ S:F)*=J,<22+=w Ƃ.rʯ +XK0f tH_ |OٳuOPB^^5>4q%=Jy1i_S* 97jюbIa:a3-(E`8{Ǝ{pZR M?AYk~㇄[i޹2iᗝ|frEól"xE g :Pz?{` 8ҥK'eS(& JLW.*! ,HJ`Y(&+ Ab8< k3* Wh; Q-I0, tK8J /̶\$"R0iPL)CHQq3,COGC=0XTR8Ax=DиH?^ _d\LxeyQ<( nQ>ϵ0{ ^~ڐin}㸞t*VݦAw :6UJ DN[Ij ~'MS ? 
w򥖉 w>'K]Lo, Wi`8 o73FV- \S(RkihyV@7uRbս 3Aɑ<|=yNX' ]%Q _.w$_qXQHV@#r87߅#(eI:leʡM5#%1aC2ɓrBi 0'Ap ֙Me2,{ngBDp,E2#utkQZ՚ۓk"aƋ`7:D]ϋ05]C"VS E);Ǡ`㰫bADqkX{),]D|&Dp ]VtYxyGo_h킮C!8*"p*?E1jG]d+TxC` 9#*ȃX`TGͲ?hȣ)Apr*{%Q><ʥRsűxj%bX!%> zWT( -0`z/t*Xp*$c5>M"^Q!~f⢺ZE-cOjyR"[88lW{ű/_4䕲H;ўi#UacB1cuJYC=fd!l.J`6mȼ)``0bTc*7J,;0[~]nܖo* @'"$|쾓%@ ٙӥD z&yo\3i)F׸u~.v F#>9o~`_*ܢ7柯p,ԅ6IP)[Ǻ9}n}*o'i@i-ȄКvsq% 3]Y,n5&pe@j4@{^kG%1T!xa$wTs.؏gSە)jvźЕK[_׃ӫ1H7+*ìQf:ʫL I:: V ;(]-4H]9bp6{e,29mhÈ3$frȥ!쁥٪7&5@x8oinw9lt 3Tͽwg"j}ibKw;r1_/5_ۑ+ YW . ٟ}UK cF_b,i &~r֗pt%_3ڛ?f ȋW3!ޱ}d8l 1ڦgc朝ͻ|Nk67_@t.ٹ,׻8jyn7ovrԠK,u54N[c.$l(>yBT] ,YXen-ZybsNJnw3av{-1Ae٦ Ic[;YѰumPpG֍{F :,e=&:QtF! BGںun'Sk8G9{aV.jqS IrL Fns{  u3ZC%u\NF-JqT(|'l`RiNIe($5:Fq kj@^n`}sS(eVi{$M%oQRDPeZ($1 iV>\JcH-!GtfZ)g@ VJ kƉAQSLy`KN$6TRboPBZBzl7"K59_T,+ ʉpwqu4;Q?Kӵ GWepŬ| Ĩ4JJeLѽOvrݤX'BXa"ffȧIx.uyǼ<+01nB\wq.<y`Lw,PoaC|o l+׬I*1Pm?f<-,);{~LjL)$hqThr𕲞S 7[\)a%هZ)쪮˙$dO0PКdGAuI=Pj'm̒1ٰeByC9Ųu f?"*(Хs 𴞅=)xu{~7iOƗqNz89;rڿzVe@LPYbTg{\DzJ±i>U]P첫t 1(۫b%-ՎPRNaԏw`HHpG3IEV?Ǐ#4?.uF"'=̤uײG$S + ٙܧje'֊[ƘŞO{󧑠7ge.pc$QYa2j"\0% y$< +]5qbPDR/zlycߍU%5/0F͇CרU4A$|o*uDw}B.iU6]S4R G95`\Zq2rR-9w(vyV/qG8DPr8~ (T ,ߤd?Ήxb&xG{6r$C8sf6Aag tlGYHrb~nj/j)v(`XU::c4A;j'CӶgN 헓ө+4O̿qu)t_uQV?K'Ȍݏ?ﶯ~O2np5.8OwMgo@MWߛyuuQC[W9뺅7 spu4vu Z6~dr_I#e1RT\ǫ9}7\vgsΛ3Q77X ]oO[&qro8]i1Pi7>{_/C*'ÕW6?/ꆵ ٟ6?:NI:ww(?]${ ׻W'/:[ŻIƇ/O ?FKÿx=W˒Ե_7cd\ky_?NFk2D<1L H 8[ w7uK,VP Ûd0HtndsVix=|ˢ.IEN\ഴ{Gݼ]չXh]VSN1e땖Emա: hzQ6D){IQ"o"hM/x 6sTR݋Ғ \~c}+\vaoH ΃S^7eC)QScG!qV{VFK5We䀺"sy}ɱUً*ERqsd,Id2F NIeU"2@G6$쓻;,Ya*pIiqy\|繉y 4?p[ئA3cEGEWH fz6>: ?~x 'd:Sprw)֪w.[gGC0ւhd:{3/H>GAcR Z9?ЗJO%gv bW-A}ZvIX!:\T(Dؓ60p )nʜ_[;>RqT0mFa~PP1醍f^sf Z.{!e$n or`cVppJJ%a)0D8lcS dy#c2GWZ?;YwPP\հX;bA}ba4#13 ZIз Qb-Hٶ+d[ooVvʝLaMYF(UB#i-ڌkv >U&:;P+$vy^RMJK%ctq4}=I,Q#a:RM?cDAp |dpVWa2gƂ>dG7%|{?J O ZQD@XkHaD:\d4%X"]:1Ѱ12DDգaGWnOTS$=Zv%h,؎x:7Zwy/kץ.lOܚ#-@2}@Dm?mC[5C(T+r/EdF[%i1ͥF~(ĸԨRYKf=◪Myd3^ ?囊u)'c*RlKi82Ii"v! 4TP$IR):F7Ō."5l%qƱBA:A"(;]P=C:TC @6іX |9ՖZ}!9-nsL9) v6w 2hWIe [}f&if#E֡P\LE`@r&X}eƹBa+cÊ!65ʯ5 JQ,8+SeA*"T (7hL<从aؽ6adb43VЧ!U!$X \֍iTEjǖr$`aGkV_"TF.^XE𥪽ZFq4r62o t&)/) 'Iʄ%Y Ҩ4NSpUYUSGR2 gZfS-=bd4ί#0%߅Ys\ n]z/T_f=G24rNWmE'i OXUJn>mc 82UN'DD [I$ma`)eZ8lf哰~h|J e/h1 ',R8xB*UF&" pʸz*;93Z4cMRRڦr] غ2I-ϒQz}")H&s)IQ)wmF|X :\7*fZVX 6k^*Z4VBxX`w8+w߸FֶLN?E!t.B׸^Z>,lہVfRuj9!۾oX*ٜ{􊇝x. yūcA#RiH0ǜHAwH:m`9?w2K vs+F/ky]&`_D߲]S: o]?Eې???G!m0y߃|QO./fYM䗊n H~EZ$f[§lhPG ?[! 2y)hf<\O9K֎\Ŷl6[wY"]z7qF* aT[51,+83eMFgQ=f?xqp?vd>bzA I)H [=hu+MCY!_m(@bh;GXՙJ: i9CTQ Ij|3]g H 6y6y6y6ٸl@J\,E".eHYeƨ2LpB3,uY`R@Qn)C~czm9 s!XޱUAnbIɴvЄo qXet<#\76Rc20ucpOl ,Rs\׵7&yVX(0/;;LZ_}?PJA+J"VMFpPV<|2Cˏm|~&͠0/{uq/̄2Ta 0-PԊLzA>^X!"U,?nO7[Njm(Ŏ¸`ok)0Q`=rȶaΟuZMd% %YWy rn69 ,:ryΛΜiZ\]z$,ocOƓ0pO(?ӉV'^XxauE^'>',݇M9Q! .۟Ÿuh6`S)wjgVJ.&N-ʈꥆJ a˙U춚\Vٳp)r}˥G.`<~<ՎϹMz`}F4*fY>nQ|~<ӒL iE?ywC1~j_6oc)RNnY=>?Gߎz^r{[%d1\zf݅o uoniM's8qO}ola\_M}Pd4.J6:{j)hk3)ʀn_2. sܪxrpASe\qW#KG3};7Aozz+~r{vCYdsVM'ĥ’i·{^)a薾70rq2]qitݣpx얙tVa(#ht~vh(d/bͪ81)/qSzkd4ǁ~}7 G_`rOR-b<> F{g 1| R'{'n:R83M][ Ky{ZNԝm=&YY 6\<"^Y(6|pkf'˿p&Ev;AV:kğ{1|LhS!7f|j ܋E|*r-^ Z<-HyFD`B=F3_ǷSWYn/sqf?{/C[㥬 V7W^=~挍NHwV0ݑW޼ݸ/ /s~5o#UD;? 
=E UHt<\"V ;2H1KI4p"̑f.~ ">$-܀QT> 9|>"ckEqZ![?}oitz~(ߏxqe%B(hEWvZ!K0B9chwꏊ)b>c$+K`awbYi` |=B#5Lh*hܜ6ɴBlT%cGݵ tyA>6mر]Enr Pf*aW D3<_osP?B5;I0n,>+N42S-lAd>sbd7:z1uwz~М`%ͭb\G2ȍuFV1e\6Vk[ k 4KXLOADya8F0#*1  QAx}52 :BlmBO&bX6V6B .%8iZaA0u(UIW1ݶ8s{fZu * "&HR8*%:0Ȟ.jI -d)]ۨaZCT/ } 2kACkP~=Betd=9=)kH Ipo@TARjU½"ḭ1bNtng@^@֊e k*$tV/:T\R`a˥$b=ܶJ f.oɻQ=xAE2sbX7X7$2Z 4VPm|~SG34$h-D`I,d+^:AwWry#xϚ`l8D Ѿ+1\Y)qI`yV(\o+b`B듏gF^.Sַ1@hrDžr[XܬtjG sQ *+hIݨݗdd7 Trqcbcၷ5[((l7~!cRTI<79 _KMwm~(K1J(}.UʫPR@Rh: P L:whF:SܨkѴ4.Mkܝ˸.Oj GILrhCی0 6g1we0%Yo)o歌Q," YtLߛYv)w{xkbÞKof)6]t%MQ)`OǺYz+0Ghhinf\j3˵b9BB U̫ScQU^z_4FtG HѨNc+*§I|ir^GwE n!XqKizB4Vc8;?|۽!㣏ﻍ{~Kn!negh7rnLBr\\vFm``aO_{~Ҳeoo>}!嫀(OG4 XAyB lucN(!1>09׼dѐϒ^mt)ut$ev}2pM2pJ- 8լ29ŢmM\tABKVGF/} 1;C$N+![*: ]JtP6 ˥i(.хٶR6՚aM\rPDA QLU磨M`bZjã +>t)-ʻ-VT| $i5Pk녒tv4EA1qQ(tIo{!\b#38 %v Q(4F19VE!q)V, hA6`ƼǶ~LO `ڷ@\4f/CZP|m91MW(i-=k*b7xI5M49hSS*IƝ}LdVW'WF$74ҵ֒h@j? o!2 Y^f`3kY9#@ƣ?$}и6`9hJ#bnL![>L3 !߼E.͵-g=51HA3C h!V]pFԅ?I4s42NlN,nJh)4cb yugA?{WƱL rj_צsDsq]U]6ZTHK9\j"jMv-:KY`p+cKt43^q2 ,Ag- {R'aK b Sh`oV8^&pC;1Drȓ+YmgSsl€> àn8c0[SM-PS()dogC}xi0_KI\bpB^ɟes*7Z"AnO<2c`3%?n,[<xF{P-GwT]m+Ee:?C(B-/1 t/u;jW:gO+S^.Í},aUp\"W@}֟)1v'\|~VA35[C! 0Qà:&)ѕu07gW'Ie4(MG"Oi3$ӮZ ̮#]bYkM[²F..Ud8H88Vc[zkF!-"0hJ1HbV1)Cf}.HhNB*e|D^`;0/YiM9B=IeE3u8[=鿋 =*-7hqEu[].vFlOuQl6~I[3Ei[b,(#(db=m@!0FAR[vlEopY3H*{1TR:c&C%YOVMvF:sk|ZNˏI @L'~welqF^Y?gwh`;>rU6%.{x؞l.{6|Lཽn<_G.ue̷g'',ƃ ޥn6Uffu}l3P=F!Kufw]F [/C/`)ٳpiUbd8Ig0]_e? ҍ>McI~ Pg]\]=>~=DzNϧ o{ N2>.-킞]߄A/E 9>L.\L ʫu~wLn\*X .!mM.>3 ,1LO_ᡇ]=w-̓"HIy %aA 8(0wB:ʔL5՝LB|a\.,`Q_"XyzހFcjR8T$46h R؁ ^#Rmz-;zf36=wofn:@َzGG py qqkykj$<~uZ]B .PwW:Tz)*0sޖI?ƭIbTp$J1\c2SE#JQ0OB\S8#G"e¬ ,0Xu*5n.qs=%b%.5$fW&"WH2Q_8L+`W4ީNPrXtwжc@wG St gj/H܋r7ƔPHp-xu$k?v`Uى"h+({K̚'v]^3N]{Gợ b Ѻ[~uM Ts(۴ށMPm[UPRzP=70VqthpW{*I^WjԬT?UIetMLkl5X"强D?Q{4"*z/k71}S;Uk4qVUDN6]y&[H #9=MFo-sGG{nK pF 62]* cRQxyV#M2feZ d&찼lUTbErB0\z-u>l?F+ _:79B7{;~O1~֚v˥ZjPu4Mc!9s!U7``DfYτY<NFF_ڑ[=4E̷)Kդ`Gl)UAc[顡fb[0Rcu `@svv^y]]vLe.0xMB WѺ>gέhGή޻^gGn6U)d[fzVi; ڝ IG*AQLB|L(%=mjFX]E)'յbuQхrEZKa&vɿ][RŚȜ6 c"xl^͋f>wNp˜JSjIjX-e2 b&Lc琓X$ZtSՄ @-O(&$k8;{3j|zdMO=ӎA'94gb[* SFBpƴ~r4L&ӗs$ ~lF#NMT?Ķ D*tDȴF֨BBP}JQ2 !(P"@昸⇄S/~V&'ȗس &vu"RH$,EXa_u~_0`i4N.q~EQJt[ bͼNi5kO![BBtCiZ]1LۃиVrCrg9Zs0} qlz D.C*۴Ɲ[_"/6/T6 Vi| J E!+ĺoJDRlG;"^*zWgRٲ_gƊ#!ҵK Bve=Z=z*-+β*TWN*TFE)Ժ km{RobDk2fL+ ?4JS}XN ͎a7),C.! 8m S2v!25 Je)Ҕ{#ER4țBbW5}#ߌL)qYCʸl@(쁊nX4&&RY~!PrBJ q1(HsbL$%Y'0uDmHCfRKpBk&VRXàf`C%J%UL=86-C_I$*QcNʑ Z6q U%*M9)12s%ֶX}օA18cƍ$>V bI`sQI] '`l>6Dba; ;w ~O汱Оd=IB{Bew[\æl"$cA)uMJvg+|BMZ8XP q\ .<NƮ NbXF+h+ozOWS, cQ|$J7|pvp "K>WoTσ^1&[[{bDF7'0t8sp f NfHHAmߟCӜ8(BzFSxO3{QL=cc׻E4ӊJi3o! ~3Aإ(P 1mn~VqϷ h*#% TnDd_EZ;*xgжlv׊(ׄoz#mڝ6XpKI3_2;V8# J?cgl6&M/&j05ڬil .[j@.)Bo2zTy3_ē OH< ",ܽdI#\9ʕQ!"'c{꤀.2O`JBP^ZV2>VZݺͽ֞+e/ nqy^hʟLkEF4m/:rz|OXvhB|ビ2#<2'‚l{Z"`CQfin1剦`Br@]P{\ ~_sNZNYoPXjG'*ţFb$FyLO)SI)'n5ٖIDx5¿TX$% o rnIIb6e֧>A}9QtsC"%9(L!qpLsZYPͰHƁJkX v.&ui' Y'4X<`0ߞ7!H B(σOf|0ȒyH`y0sy@0 51 }1Kx h% <l}itz̴[ȟܺ+ܔѸy>=|2> hD܂=Eɐ Z3??@P|<4\v)6glRHBuNILX>2{`6}`yq] AA! 
:/@ $؊Q_̝zP0{LtlRRg_ve%Ǹ=fi|z2beM awUB8$8fEE`&u(SР 6ր֤[8!EKH >߽d{> I~.nl)sS)'Z[ݴH&( la-`}5gW$.)ᯊc20[ 5_vCT#i9tz}v٩A|7;U#z,zTEЮ8Sbl+(/tj;a0)2~WZ,j"jٯ]Bo"wm~-RWS F9s^0]3:Ύ1{yZl h] SJZ7I:_}|LÒc^( s~|s,zvkM}VhEy'a`6Ob䬣79W{9b *?=N΂:n^|_?F@ٸw<Om 7 ?]w3}_'ǯ^ٯG{?7͟W?en`_;U.~tWώݍñOx/vl05!Tu+& |?"l?p:zeӛg/I~xh|7B=EC^(]^Pϣ&c_ }o`zv"/fǣssO=.}9$_K,hpPL ,CkM4deZ &aB&Cx/ޯ{/f:AYqY7^fޥ頸zN5 H)oݧͳBY}<de$ q6뀤t97I6ȦWWl.^@>{=x}l˫|ТWg߀b!yqhk5­d25yUG z`ǣu_C_ &t::;|6gѸ|ޛ) 2+PLo52p f* 8BgL޺/U˅Za p^Jd tZfn"8xg3 9sus t "iޭvz]ur?=߇)dbūu!a:=}4}bXAOGϏW9tg٦e31ȝ3#0#cW1߆);3#{ȨuE0!h LmL-O mv=bHbP sr?oówɠw݊Örכc~X n2l]iL6Fޕ.>RNoѐ%驁ƭ>o>B:6䊨v4ӏ=l &RpZ7r'M-swC58rLf"D=]ċ1" w=DgB^B '*Qr {1uFeh ); #g3Ff%_TܥeyNG\ _K/V9\fu2~l>Œ]<EZy<!ۇFoަez4kj7U^N)ݼx M?MZ:qQ -4b@k[mnJ6CÍBn5ύ 7J1ֳ"Fi@QI6(5H7J5_ɧ;Wܺe` nNtpf-PFTnQpHԎv-*ñw`qc#9 \&K7f2/{L3I`d#$'e#cVB:FxOErĔFE %AyYΡC>q IHGO(%xVm݊]ba&^YgAeٙ~pfvq *ԷZW޻ttt훜}:ҵҵҵwO+ҵrt+uO::sԡK HS b6BygY|q?_^a7,9U}\0Y:yxȣ8p~gҏ\Fw~߽{m^ڰ* 0&jN-~S̔IpƩxm)b|Y*zj"Vq 6xgʪXP۟u,@VCNo3D[VƫDF;4(̘+)Ow(ߤO1"9IRV^KVz$qbIH|(l}ʔgR ,3nЫj5e "eR8x'a˓FlWIݙY;{c/2~⛗<䡙Js9zq[za|Govv "p,[;}r/OQp\H.<Ž_~pO0|hF!m_w)(0k qEp+&-"ߝjd1  9qE)⻲1QԍC4_|A\ܘ[ 7K/~&TF4K:q_2ܺCT-sX:V.h3"{zl5C.jpr [(Aa 2gK40֜HJZx !Q8,+UhIgJƙ=+c3+)Zia4錋P9OE>R&XK?u6EC,W 1.RzjK EϨ(%,xA"$s-xVoAQ_"wu .w[%\z-Q_LKZjな= !2IRx x)#u'aKD\*ERNC_P&"' 㑰" 2 $ ]JAs`TIKHj:Urc&2i-R `7N]kS"K+|؍_wDTqSvO}UڀoVN#縜gʬ|2+|G:Z)J}KsʠKX 6p<f~S-XzL2=.}s㜁 1Y†9xS"ވΆ!im@aH#XEB`[9 MX>3w^Z*,d@yP$2et"o` ^džVcplhifG:5M5Ƅmo&rm \DY4,FքKLNN[ʎNO]64_, Zg<wA~KʿXv# ,&sőϩCVIRko^n⮐bi?5 ⨓o.2x3KER35Z*q= H5K3dQ-ft #E&J|mFJ玤TaX"ǎ\ GrXaѮ1_{R{-_@ShQ8>o#AH72 GF"NQdPB+%9Sa}$ h$ @s*g T A&q+Ș*JU$\jv yR@iKJ"ZR;>q1`"R~T >Q04l1D2̓P1:A-` ~ux]yЉ=,0)"&D)mL\+R*?,7cH&"0 Ȍj3!招&@ 9qzLUfd``I9n8 `Rf!5+)'Hj].&Ј:55?E)j L[q~"zr̠9| ="gvm{Ԅ)br6|@oB#\j4C #}u|;" {_8c..%?R"ir%lKL `:]mäd4%@ܖF[c)mX-KbgL-`Ɖ5q)oO7Iñ!y?nC(fE nSSg70Ig a*Ŝdђz6DPt祆&Nsf>5_".$4Qn嵐E\EUdy yL,Eb_.s$h.:TD 5ܶxE7L"c(L2ڜikIIڤ0wtp45'"!&[* 1VqʗpA[zs̜Ok_ |/c2S,` i)\Pt2]meGD8$6S/Y8ۼWaAd =0 HnD+*u&{Ovc$1 ,Ⱈ #}zsqQ茝p~qP|a+ʎaD}_Vf+͙R2ҼyF&1h%XGG{GVD=C`dp<}{\oNj9^ǁߝ`o,6C{F|. w ݴ:F(p`=]SX)%aI#$z}CO'FDI[_N%ώӯp͞rvw~OJvsO/_l~;}O7{&b~?k]^Sx /Y?HǧD.d5z/i&j{'DMSd:Q,d[)gyOF%si.UȤQ{ӣɸAopxk8a8jmv*ɬȶ"JO]RBܓ\w3fq(/4nVD/}V.7MiS2@n4y=( J3yϡZs\! 
7LOUFZ2?)qΡ9C~\<:1fi`7KrUJͫRMU{niIPi~U=-745OmV'̷S7;m[\:Z%oXwpZ+R{J7!V) fV)kWDcHkEu2Y]x\zy.Nvqه1/ZLĒWlp%εΒߴR΂ʹKWatK[}7+|7./w,nJWWQvZwX'G"&~E:+j'ʩ.pf}.$V|bȱhpqfUרxN(;7~OS ;OڝTfp"lӤ_+u|X:C9brYr׎h(8д7?η WC|sVK;+Tӽ|ꢔg WWc]=/}»U/nqY1ƼFkyaU2==2pp 9Tn/tg`׷{սuj `HW׼3|߼6j4hY7!46տ{.XЩ>xx8CloF<މ/C=o͛OdN?_!qoC3}q#O2n{5+v|3k}!Y޿?EpqԼR}{i~+x yD{㶑a>]r0l_\1dBhݶ639IG߯f(iIc83EVկꪏgŰ*tho_β/~p60ੂ45eŸ/͍) TE{Mp^?7Gyۼg׷,Kn_,9>#ʫ_2}[`P˅ݫ^/3sYJrHO~1)n)f@o]O*Mt6l-77/WEڿ{\gֿX TFa]3avEy0YM Mɦ`HFB C $K`&Q)QX#"C.:AJs c&:2=O%۸̧e>.wO.j*3XrDQENS-]q}  pg9 GV(ž(*,ۣA,}^dře7^[zӻdn78GlT(J]inOx5Rh$*DDC"p!*Τ4Œ$MJ9g ƎS_T$8j_j8D)TFƮjBL ԇ^HucJC=7^esn=jIУ@ƌ`Y0U< ?/L{(S.SAFz{(fJsAɎFpX1̷nؙkضe icK-ʎ}L^u\rŽ?~~ nڮUyaaY&:8i9gdk*Yg3>XljӋ ;rqu rbV>0UVHz%bk~:ta';^9V[ju}ZB |úxU;oƣw8Dx\|7*,/G3Fוs%e&?ww~a&KCC@%B_=XTT|hB0v f̝e{7pp'ohu(ߖ~&h ső(D5>PȄVN:>jH ƨWA{mR* /N{ QfS2Z[`,EZ-X% g(<,w0l~ywH pr]uv3ANچmrw]o|fci7\bF5ܟ<¾- wbgdkPdMsDk8MrN3"'3TRҜ9#805Εp*dڌ HR0#iJa+Ucf GJH:5k&W6Nڎ5&R} t&(513i*s M;ZF ~I12B!٫i5crpJBMWѕQPlkW34< ϟ& $|rap 6K0Jdt4CbW+ _yyӽIx1-CnC)ŀǏ<Ԩ7 m~7B0d.^T:PuIFKՏldi濦,P8 )D9?=_!L XfnMثG{GuFT %kB([e~yzH.PN ?_^ZuAC/n>?ef}(-:t~uG;潃2̅@_C;sZpU[?EK̈́ u]jF` xyXE|g?݇TP*N- ?7P5Ф$h # g"f ӠPHq fPi0l,"ſZwy^_Ѫ˭ѧקv]xJFzVG'=T8s j%NPjWp(NLX+,rmYaA| _b$ϫ @{ 13LlN@b A#7if n,éB`j&3<sDPI|L[쒜S`$$RB49GJ,t9Dml  o ¸%(wt~}i,0jKo(@)ǍhwT1Ku6#:ҁwDE@zq|kIYpp-z5o_W;tg?i !]‰Q̘ !%ٜKEn}ĬV\jp.Sy:)X2TF.SCD/5q .w'S } 5b rj@>dB/Ԝf`޶k#s_`phk0ItaDT]*eR9o36B0: N&Zg F[i՜=_&XBSJn@ g#Lv#P]͐9ǣyj Ro'7'q2ZWNM ;Fa$!؝Cdc0VA!,*%PƘ$w-6s+7v-csSG=+VS!t]A@Q۲s  {q)Ǻr(Lʝ! Fl^yȄ^;w:h&*D;[^ +_K8jVA; S8?s_g4`.X<ʇf6#ČVz|qC5mo(|BIo5Oc kTfX4=:8znuP޽dm*$VM%X>[:q{÷ `&NG #B3 9ÝO1?[NFzmpgT+7260c[ eG᧴DCG&*laOX|HuB8:a#̝ D~hܩFxmbW;&T4.ˆbbcVgvze#jThu] s]@6ؘsB}b5эY,}^p`]av=|4 \í!{'e''h_6 -๽1U~s*.`R^B)r&4>3*$ S&·4I)r\3{T, kdg3)7Lfe?/lkgC氠[9Β$È@d.f{;!qKH|\XԔ/6yv$|Cf:UW:yx@Sǘ>..Wc?~պn\:->^NQFK8a$É`"c]l>^-:??H c=_3֎+dfS IZU{ƾ$^hܡ~8TiqBqh9R\_N- 4Q0SiJmS,-T!,T>Y4|ea IR:rM-N4 T^BQgLlv,w:f᭐vlGS6a2K9 ~R)S0o'%3U?7˖,rPRp20)]p KI.qO_(ded.nO60HzI[M Xw3u"% kDdٻrOfn{siu V9s lOT䌥N߭01d Vt+Rm9 tfiH)瓔5Oaǃ}cI.Ư#xWoHheAC&[%EUB,!R96SFA^/?*]`}Yϟ'+J]O%%&.WFf*۾5?*):N]CT*qsj2ZJI6K5[#8jkl辘T>lgU SErb>_(**Tg{$p=5oW7٫՗$-&*5T gkFD[2IDVQ vԽ`sjOpU!Hik=r ɢFBB>r :i!j *8n14[[ϝc $6+^aD ":- fY.mPEUN'p %s?r@".EE;M: bVp%4D w>iكj6H"qGZ$gRR:HIb >mM\M#kk0gΡ>RGJ{r]-;AZ$+$3΢:2~ @M1~ސ}hgm.,Sv5,b  [ˋy˚[\?:Z'c)VoVo+gYիXbJL2>>ؕFcIy5Wbh7V>@0Ówo5?j!ixݓV-A@Wt -w `&h78ni4l_a6rpLŎrq9FEQOg²QPі'm_Z>7|.'%A@axW~}}B&WYIZ=7E0whoQcq&QǢL:TI(8PvjJI<.H>U|={9ֲV1e, Vq\&gq#hERgOl( 1X PB$ׯ0RbAu2Z&)<9R{VȪR9"Ǣ!2#z6"MDPš-B*P;!`)_r3dtJ! 9sFQ@!'ʌmLZ'}Pio"E#d-D  1ҊT7%Y: QYupZf,n2D@rtdO,XNDᵏP$r6N ߃9^qNڋ4T NKɤtI\liJrzLZNRЭqV5Zs1@ 1 O9G48R  (Ix4zQ&BZ;R|<ƨ]׫81fb}ݥ,&"FxVRS< C-bu wbmI1iLȌ3"J52h`h/&z9\Gk/ڻU &y/EB=޺gn-QM^Tmn,ջ1'O9|P/!KJ"F~JK\m9;4=ޡ`nH Y4].^$U;qC6Z}Ō=/z8n"~cOwwZ#kӸ yRZ V%9vey"j? 'K7O.{ j#xwdG_]໚}QtՎ"wƅ\WHTW`v-! 
EJǏKSvoӔ](7$Ё"W{~@Ї_CSjgܠd̐I2d`w4~ @ףm㾡Ra> -A3M{)5 @|;Ph.EuAnsw\:GvT(Y * <aI.G-sE4hԴL Q[hpQ s) sIܓTB(Z0ZDLG3PʣwHa I<4Lb`0i ZDZFY{Ce1`z_,I>98%іP g"/&{ ;//˱PC kYJQ2\2'ufGgЄ U:w< bPh{ȓa[@xD xjS@HJf{)Ũ~r;<\/Q0;_Ac R Y/=];}oٓm@cynG;pK ulozw_k l=CltO g~_ Sw/{=B uz_(w~O nq;Mٻc&01Y#3Ln2Qj.Wy?Q"?F[ vo=ښPKWg&&'ؖ ԁa}=P5a=݀^3?eUr/{cRC^^YYYv<H1*YJ2!RdX0g].G[F;QԆkS62gW#o ]}9Vg@ZkPc I}z@{#ແ׎ﰲ|,Dʷtd%LL1b$5Bኄ=JMC7zÖ;Ť fJy8zc7+`.fjc% ER_)&]tYo]bAk2=<GJRm9ÛaW2bk sPhxh9Oߪ-+{ ^ Jw#Ym|/Q?tmȑxf4fl I|z\ֲ?͆]ͦ,!i-NeCF3"&wE n#p̉ Eh}z}r-JhkO*sA&pﰟ?n;ٮkdDxf26U{e0*5䵕LcHh('bA~f޾hO73Aɱ[p^>\Ӧy)ɋen$_ Z1KRwvǓuz[~qڴ{e/Nb;?4˗UVJm%ٟr/~'rOm/N{O-Ri75$6Um}&W]~*_?Vo8o%~|6eyŨ}8hԇV&?)Q;y2%+JE֮"6AJ}3J|٨{~Կ>oJ|WG ;IiCTB.DVVE8' FQ\XVS@YIID ;70hg)6ϿBzWF"9Z/q~oYch7*ջD)4οO~rDίgyeB,9ėc/>n<o߿~u]e/W-/4~R^W~Ǝs̸Z<$\ 72\[(lOK{6O&7^[ܟŅq7zpf]gx6I8߅tQh.' L5ESz>s Z_L_os&qOK??T#y"?|r+ڛ[/N#4:u!{uYYy6-K(, <GƲCg?/B}sl>-,X4KS٦w1Mgpa>'żVʩ> .n..[_fټC-+j[ \ܔ˱J\sP)9rjST *J}≽y8PgU}۩/I.X3ev&ȌhɛNHErC\.ro؍UsUO>* ()Ta֞ĮDr-O;EҊ=j9[LD|}R{ih1N%([xAژWIP1 !FOw(llNݙ7qI'rLkm#G/9)dh ^d +_?%;l[ݲ3LbHVHMm@0u2EnڜVo`p!7!F`jboyppwqeM#XgBnѲ({X[+XR.~[(S>ɔS)L?6; 'Aʽw._߽=rz}ݙ-R1ڟMߑ lp&<ȣџvщS\781q6]חglSn^_7'ܜ}L:~eB;}N57$stMvQS!]pސx ؇ n܃ &Lۦ1<<!diɄGF亼{i'6HGnB!nu ѩ YY)IS ; p>kY֐7TF+PhfX%]7ĸޝ&lL|s=Ľċy1b`-C PFtgCH5[r4V$<9ljV$>Řtd)j!װ1>vucyk| g9Z_?Ճk-v6L~6>7KORiU' c㭺;~F#+;2w2eUCfe,7s_Ѻmÿ'Yz];Pp~7 "]yjͷjME`xj^5!vsg˰r?ÏfTjtm{S-R`Sͥ@z9- 4LJOkFV]ϰ?Hgtjd3ӄ{ d:*fףh '[^ROo~u9}\\5LąT2hˀ=̔Ď3t;n#UN+쉲Ů[.\i3m| T+\\d!DlW5ƫ+_-V!;F_uZyk7ڰWn16FMGbeb:c4n"N+]{@ֆr])l3Xi bqHޜ][vv?Z6QպrEKHocיPL1GT sE`f@oJY\Xv=n+ ]5AX)/6OVzV,(휾Dnu}JJy\hRv[FN˛?Wov;+P}wCS"7\dyܲ51@1BrjьowjD8e-~JZa AKW^h'ܤ܍J+'`Ƀ"#FD-GK݌ A!i3rI?x0r6^^3CQ5M&@'z<zzQg0&[e#nO 2 ˏN6SQɌzyMQMtyMrc;pې9x)ٖx9pnZ9@x(Hh'!$wkvk@B/ m>]iAn^v ـjЇB= z9F&tat!A/{2@Q6\8p\b5>pSmUτkx81ks0ßߒ=6 t#Ӈb'G@.oٙѺu,. 0-ywIm_4!ku(zb5N] 7g՗8ϥ&22f?㏾ޟj!DM5tG]+Jg;ؤ6#Qm /V!hc4qq6 ~000 y&eSR ~^nwAĎĻ}jo]z@ֆr)GkH1`XNM[#%̻׸z6,䕛hͦ3#ec]!=l^vL@KcN8e0Zq',G!l{G1q|'bfsoHӺߒOv>WnFpXDu8]%e*ٟ *<,2P)I;LPmhXƾ\vJ,`KczHJ:<_ZUr8ڙYdeǔd\P+@t'ݵ;jj7E'3)QUno 1\K1,Iqt4*MO!ve! %(c^ų\$Yil&34YdY LȪ@5*Ti_P (+ĀO3af:QEd& B9ScP rQYJYP#12"ż 1OHst)P2 pζ1pwS=EZ0$S΅1ML+ pRrbClۛ |n:}'& ?36Swcz"]O bFx'm.f82 (瞐:+h8b _xA_(،VJe 00Z :]A";!M\D@%d?(.#:@HV/kjCiTs0f_U0o/jZ% *UĪ/UT{Ҭepۖ ^)Hlį Ijybgs %ı,2{E#j_}}LDpv0] zbeS;FXǯ)imX+7m4:4l@JE=F\r*9aK0ISVAw5u%it{|t΂ c|;+!*Ynu]zB?n+ ]aaK dGmaFݢpVFVҰA9qڵҰ%NVzVJq)08+ZW'NVz|V*W#k7:՚?YQ[iP]j=+EtjS9-?KvV+Yn, 4WM&\LĀi",&Uʘ( [-bu[613CGba v=FwFd"FVu"t Bc[gW[-Ag"%lPm}ن\V0:ޞ|V[\mRۢe:3Ly h}p5n& f5@B|Хsc9\٩$0^  C CLROZ6hլstYF})(9{8{'Bhm}W:ޘ)w \ ΣCeWT 76_mz[?krM/i늄+?/}pkoiBKBN^ykhNd 49Tփ@mH#>(ON7m*^KoOqXՍ~2>✍J/6=O+Gӏa洔\[1v?>\ s`.0Y`iH%^ҪJ-/$"A Y2Ssh矁]rt>Z>N9z׼\e=M+T~̖Oy14]:5uȥzjjZ|\n2)C=q8q03qQ…_.pQ]3.˙]̚4#6*Sm@4XلdYђ$ϸ؁f\5@S xĸd5.L<,։e5Z C.D^Ayf&C(C9!vjChKѱ 9&6W(YdEjĤH%ݎ&"PF3 hQFqm>nAir~rĭHt*:u3E9Iy&.,jiׄ' 787G*xp a,3)/seZ ̹,#t#d({Nޤ.3#EއD)],[YB;(iiI>~ ?ÒK"/x7_|yޞ9f|\_`ΜvR?\`;nU]o$qWQ"@(p 60;B#\F 艹]Ýe-LuUuuUuկo/>Y9V:.7)p}R0$+%Lڋ~v}dp,jܿf8:{yf:h k'pOtMz}3 T9$l破VWT$byW1%󑚜'Q{b8db%Q@G=@&p+h;C7x}E aGN7*|)P 5|*!% 6-~wԓ6=~"h)-+Km<|T$7ON"]wu>%q-#QN&(Z,P%<Gת#uA$(LK"-s,\-r<RWY*)NSMa8 U^?]s[m%=u=%f;aMa_׆>x("Km;=Z\\"cȷc,}gg)Bɔ5zrs']Zr#ł>sWN쩛':l} v?V~'22dsKxȭ*Bkx24l#9+<$=`]耒AM' ԝ\׫2Do;*?zjKFO TEY Tn0hF|ugQaȭKnekНj׃ XWXĠ~ub%Ш%:񆖆 $ 5C,;pe_.7Ǧ>oO-\i&*+z3Xcނ" .6; +FF[6⒄,6bnj! 
|w\eٖ2}了y0f~ͯMCT+Vl|D.8|-Χ |5?lHǚdpSfݢ/Џ݄gP+]ՏNUυ[ 4W]@#xiHpS k,6b_Caxa k࠽OvlF;ZZx^\כc]MBk*^ ҿϟ>}ZGW{Jfs$Y当ۧGa2JHԺd`t90YdjcB7|iMFMƷgŌ#Z)N joF0 Փ&$ Puیm3Z6L.Dey7GL7f%5=_aǮ0}rkn׸x b2H+W]ardN漆yVxOer,*I>gXpGꋉ ^V@?6~zVk&FR1$.)A 冃L)% )[%9sIJo2ϸf pc0g"wwQԮ f~͍95ܖkDɬe P0+qL Z9kOٝJYќVFJr]i<-FV¼xZie So>N`4x=C&yҒ^ P Rz:9B 82)[Z1Ԕ'k4ay9fEfS)=+@0Fˉz6B!zmѫ} O`aι%RLɖ @lGBI\Jꂙ̐J-YbJE&DY7T4N!qzrݮ~@n_BĂ:N|&٠_>}7ljjx&^NczP#/Hjɱ`lӈ"龄"Qz60@PEZ0>I5Xe5A@% Ny~yBl[%CM *o7*42]B˖oWKn~NLqn8?>F:oGزK<5L#\ᚨPA%# lxAkpsGAx'CHt \Yd.yp5a \88acOin@[džPӒeS܍U0PsoPzլX-wa/E2W Mf;O.=pU _Ta['ǖOeY]/\gv=æ glwya_V1eWoiڒVa,|uQ!=e;j[)26\r e Vhf! vXEņP+AMDRmJ+bج vpYܲ9u-!k%J>WN#Ipxxxݭ7ѥx^$Jq00O! ߺ:W!hg 84(sj-F wXcdTtqgkb2'Z޺ ,)'XhCߕa<.ZSBَ`хݩVPM}@Vv  8BͲiEtFg-~^ZՒ%W@nfQo?/vm;TAV]msZuz!G>?g79Ug}4\YW2/nf~cK[Х{y~[W?"~3jݭ/]=Q!w{UWgX8inM+}EbSʥ' y*SF~a݄ZXN7BtޗϺKyZ>4䍫hNӈ &E eBՉFu2?uoqn}hWцNO2_E ?xvϭh4lǨjRmŹki)2ڢyTI5W/Y%jiqTkg[zZ(3(zԳ uQ6*䬥~N9VN9i)CKQ}դZk~nscT_{ғR?-EpSuJ-EҊjg-=a-U̫=W!8-=NUj˘\nw?bH?~\!:O"!_N%ĒS^D.yFۏ)I?Z`@߮KF"̳`egXQCyylwt_3Qo7[11nnHSd\%9[kKͲ$)3-[8$&>_FL,Nxs?9?my: ʼAeEgU^CBi*gZ_a貱!Dc{/4vQ/XZ˒FtG_IIER`w7B~Hdjw79l` ~O[iмUgyyy&PُؑiSګIz7ՍBw֍+/~on]z?mkLX>yAB.(%Ur X %qPJ?1K' زfV5 ճ P=*t7аF-U 2TafUBSJJղuЪ1.S@ՒZ.ޭ>z:?\z zѳ~uϱD駿}0'@+dcW%Qmc]MqD Ekf?ۓ;[e;֝?"k/Yx\fOf[ܼ0,_iڧB:]奟ktZOfw>kRY5r뀂@#:agm6e'['\_Ut2YE *UO_5i);PZB#w ۲1"KV0p kiK2N?wkA~Ɇ5`ougOB5wzAbpOu70m48jYzFqw$TnQay2i$2);p7CywC<% BʪލYfqjI:aT B~#?̈́;L[D;Z3%?M EL uz2=]= Dd so0@!3 kئDujH.o.:}=os{)i5컽n&TqsoNzlup{fvuw}{Kw^w]A*~C}X]z>/.fwlsRa\zo*owK1r)d>FdaIQ tw;g^/nuxΜ./TXy帖B>ZcAph-t}o辨n-H3ĝgyI@XĨI0Be9Vj -қ:!=c[%M /W=$Lq}y(kj*=תj7gS7sTU ?5m7n;HgY1]} ƛgΨD3f ʬ88Nr\ ++ṿvz}ԝ7yr:3dz muG9p K ц@`!vVzRG;|YnF`PzCZ ,]骢*PktYd̶i#jǵ(` [ 5 R%V uMrM\QW¡45hJjkFi &Q֊)$  -W#71($ut;R}}r8f4hYqk?=Yjj}:⢽;Y9NC 4DRj±ZMBK !.bPB-Α4ox[e@#v ,<8lKfv<9*zgU A%tuʢo/fE)ˇâ QNMŅWR١籷N39Zro8!+y8~VJBHR`v" k vxz{ ~Y3djdV.48dj9JfEs:ch߭O#Fӧ_#ܢ@nB*c8Ԉ(p }ER̵z{{{{p &^.ģ߃ 4`94+}1 [,ڼV `8X^0;)NpaDaQb ɘN-5bQI%ɑWBu*D:9bppUQ*ibR014;ZvnV!!_+>vSAsB햊A~#FMhxv^?n 7-|""SZ[ϵw*NR1oh)?Mg6jLqL-+q];J!3 uІj-ui[Aۼyy[Kg)T­ך)wG`)L,::٢:[bK[cXO$[\ܴasA+-Bf } zUqpB3|kQ[D("caIkwTN䲞jΥhb5+uތBjA5tA՗A󍯀f=xԦ{p58}[s9Xz20rlR6#vԀx>G͗=2w.qVyr(P(0>%H\-AxP ~2KhZTn[jtT- *k .1R4PƋc PDʔlB@lNf bkGK{:DȻ5sC>q?i%)`CQ40jF4xo;Ͽ>Hk#Stz31zLH$ĸ+ÛFϿ$ëHe?3VGgedh͆gjE(p1 x`HԤͻ=Y,5Ov)6o}'Q(#xl75? 6_DP Phsm{9g lPRF32\(o\d kϝmNZ23;B~T-)[\{j@HX|yrr$6J1ݲa-a}:XM5I?R"xVjz `v[,REj8|p0Q{N؅rĚ叭.2μ-BJ)Zeby4 ě ~ n9ƢD5 #`V9,x褩kЍ.k4R/H`ذCt)TUʕ*#TeR. 
K@W%aAZqJ-J-K@e# TЯ0,'n 67uōW.\@Z:š5'WM^T1UO3dmMjW@Օ./ NBB`Etrvw@ n[ԡ zdß/ 7@ Vg @2ҋz6]d 5Y1X[ڤJA:VX*R'8O x@-~FgM}ƣ42DD } "K˕VƻZ־ YNıh0"I?pbQ%ۢQy4=; oQ"h{)pJ\pnz2?XNi{EsFah~6P#16ɬjd1ZNaoIX`7YNnj0vr.lzO#)a:6hۧOVHi1RJv+Dѹ{c8JH}jЉVZurAUG魘umdi-aeqOSh¦21.1M 'ݓHz M sHW.)2ER%LIչR%b$:߈nYwiPABrM)ݱbcxDCT BD'1mB/!Mjhoe**p}W-JdAAx٢GQ" VڢSGE;W7vjk|e>vYؽKBfS ʖ-UI1282 d Xce08^> gPIF 픐B-=瓵~V!Ma\6=%mVTʪȨ Fpzf*SQ-J74KJx>?.wMNjjB\l(z⢍l* gvѼ%vF& F/ ΋M^-ޟWYZFimFEOwdȢq^.w./g`I&GdO-lݲ;`c*V=bV9l7կן?޵Οե\O(0~ڭښK^_Zq|ٓf 䀈z,k;2@1K q~M*н}=[7a4M􀓢QhЩtڶuU/EJt 8ero:BE67 =@:Ix/R&a fbP˄aԗmgRP*X&'CR_0Qz(RL×Ci-5i9Q*)/^lKM\gRs><|)4PzX˶$Qz(9 r@)8Rœ=7JšT_JšZ+#yTFR lYL2җRә1N@!/RkQJ<*{ (S@a/RQz(( {IZowmU?/>U^ղKGQ?YS)yDu!0Ir&ɒdxx3fF&L1 r6Q=6A{4d-h7?åb'jGk+Ock^*/sF,, 9\JiUX.|@y˝Q0b=+_e=J%ΠxSPمu gjj=߲6Z7ȩ@~s@~2':Dq)1,t21vbR*OXPV lXdTXRڕR 2XJW {tUD'l.Jw/%k :DYYM ,rgRB rYB2W,7bBLҶ׸.<P#&yUJ9I!rW) DQq^1+o jg #zF TITU-$>RJ9VM{CoG~wg:Lr@3X)l% cΩ H9lpLBK?; %3enKtdLP#`=+vWkyQ2໛^ ?bM봦cZ/0p&^N@\@e  ,ƒBhEZ?{ECk|7cAd(BR!FGƀLҸ£Ohב#`@ Z3v"^ b_Xvu2]EDe n|9ntNs {x6E*S4kzjn Mipl+ŵRfQyc✼Q^7t8ʴ 5}aĶY$m ;ُK&űKC i6à͓*F}]x2% j*}U@TMMN3NHe }_uCu$[W2?J>0E} nvWIԉzB^M ɗZqՌiSϤFNeb~G֖yUoS4Xk5Pdk>!)'y7 fmSu[}yJbyhޭq` AL~̻4|-U1I.m,$λ׸Dz>!”Ĕ7L~ԗm568sD)eNq$:UǼagj45ն]4qSOQsn44-f0؟7J㊧suKI<,e[j_7Q*LJ JCi-F(=kƵ1Ji4%Fj-inp(ERIѼ0p fOty?}y7KwHtEl>f0xGcH֠@:{A0KXf9D(F!0Ic4L;Is1XN&}h'K.Ǐpʞoh\{"d,t17/P RT\JaXj++ \x,Fd77({c9ò(gk;Da]gkpkGƇ3HbIXT z0\<ʀCT" -k_n_2 R %dLHső&*췿#Hң6 O0"=k1y志k QM~)z2wy#Eq[>wTNqu+1nC+l8[o]hC`8jҁ3r$'M/IL!&ǏIUy`i3]q;pm\'#IjW^7H;joOm7کX՜R<6U|YB8DS0IOk;F@2'[H3-$o\*է|jm#W~0?<7o~a}н`4}oޟ?XO0luGx_ؽXUW!_~ xi і~Ym%8z6W~\\͠=JĥHBoaH_-5p yv@"dtq{bްGw32 w^?U_MPY\<OC=K;_\ݣFXQ vy#vps^y˼4۶ Tnwg}ZmQ=J*}=R_y`4lO#ƧNBi?L*Ftʌ1[~7nVU8W0c7b)}6,dK B%Uv;KvQ$% Tu2rPG7~2юgn'7}֣̲g ™98ZYb/kI|Zn9G[Es=^Λ8{. ;1[b9W0D;~Qm$ϋn`Ţ\X.{JrΒIpnWXb:w(¯3PÜ D0 Dbd0y{:@a@l0$ v ^0J $7W( AMZ3H,rʃO*gNaaT\I/*EEUL:R:ƚ?;٘LyH=+*媔ɇ*!S^^y !iV3):ԆUVwHL%ɘѻ$1A&d\tc|r/6Dq2>A(10)ާxFB!# E%hMD'ld@#DRŽF#ϺAvLE̩q7lE 9Nd r7ɕ'!f 'F|ÈYKƑ.)-VO©yi#.^&R&)!k48'L¾ 8}3(CɅySSO{ תQ%pCa?X&9D/$Mrvp29lQ̨APa CV'Ǐ;_)%5մiTJ,{; KY&wTTm zUց#( 1RxZMS#Ffo0ϋ^Cu70կyb^O8dZbH1MgUuX* o:pVTe !˝ pQxQ<~VmkS#@ ʄ&|^F($n;x庴{ɏgY;| *CK{It%9\ )XP 6?)>aa\ YJIJ9_js%@ S6RonP#Q#ũnL϶vJoP6f;-^G#G3fR4pv@ٖ3` ׼ϕyO S=Xt5] ˾ FbG____5A,ЕVar>`?{WGn@_rddp[c7ѯ^a%K'6ߏI==/*6eF#VXX$m鹑`M('UdF' 829|V7m3c?t>f?]|]E5?,Kam_m+9jktr`J ŃGPʃ\] @k-J`TBKi"GFF&l~7i$&޳;z4\MxI,9j3#n# a B VCBp> cK nh<Y+q}seu 2) F{q'[$*=:չ# C"0H˕ OP@m SYxIĬj-eM%]ޤP9Q?A^!9gt+ SxݑD挮Io bFcX^#idz aSTo[wT@IH^Cf~׳a:¼ꆞu cƎ5}wSFGtRk8:}ם`1Wewu3r|!&:}xvg[ y5 ZF'Fug8-6p?+߭ʻ:khO,ݝYPCvIfW:wg7Ws3dhxjMcVC7j'!_LLl*gqs)sl=!hMf6zXP6۴AE:w%z>C4S~n&n:@'!m]yx6w!8D["fKXBD h/~Qް QJ{Xkqd!JK9nu {KYR-Fz=Ƥ 3A; =~ݞ ;x*Jw%SQWI 4oJb =LME+%ƹ^O/0WZũ.rg*C ԯusbFi{g1qaXsYü:|]8Q(?R-: e#t2v65ī[qK!SFc8qޭi bE&{֨{<,"q4.Vɞi}瀔7l=Tj]g\zOϝ=O-+$j7mjAgH|ZTU{1׎u~Sӛ%0#Uw|=AO*컧VһjY/2gcfHz[0p):~(kѵa4T2eN? 
s;\=Yn!Ymw6aTnyW~S֧n4g'7;wF֦0E,Y.H+TMmBڠi`}I>=ͶQ՚@_]b kVzS jpK !.`~_3 ;Z<}Y2q?.{H_/KYN5ĝ`';d="Xl-sO.dL,p_`۝ׯg(|z&~Ѩt@'YJB4K-\+ ȂCΒҖy}̓BnRG+.E[j?9v4Hn^&O}R_6ңF)})JRٸšH)=fsFJU(=__ꋶ8Q x2}R_lH"J9_ -1D>/6F$ZRI\+ңF)*JVA{J:힎Ns+)zJK}іZssǍR4rXX.8ґ}q]S^y#^Qޙ>4$gOa߲8wHqv%ٽoR|"xPd|$"b9K/?'hy0&ynse&F2-fzrbNaC4 od`^!L,2dɇy;o6"PnʳQAhq4Sx mw,"4٧K?W?K 19DyłiFM q+cwW\_˲.sS2P:Tb<'nśXz{ɭ-wwnکW WI/ 28:&"gK#7K6bp$k0Kf6X?o׫9{qrad4~mpWQf;xT~аm>n4;nvЍc~pO$֏O~)&Ɔ61he.z6(\p𽟒$ Z7ȓTf0Ps?1gC_֞u̶%B>ퟄ6f/&;]%˿B=S s%T}-PJ.p6Yi/_ܻ-F׹e}| t.ڳ}Oz z7^ 7Υ6{0>%36@ D|V ٠ΚVΥD;W:Ć;8mrԦp1`HGzLpM;j?Qlta[݂5L#^rj`fT]Sj}Sϟqi/u d"]Ga*JBtB{ tJeG҄hH cv}{T9^;+b v1zifQ73UG{R[MH9j;~nt?ۮwC]tDwhzu4>Qr o5{PV(`.ZR;x2R~j<9Ymx'7Ɠ= h,?yӻIox6DΦ(ڻg[!MGܤmz7[(ЉFwpo`h-@7ΘBmʩ5Jm+!kRmv}M;#򆭽 f O*էP!/8Eersv(ZjN 1Qzn]_]E5*+>7ZʖۻY~V^*5Gp+%j=l6!cG Y6wpaISd'ge4OG4ICp^?GcqhшE&b`M3[ߩE2wN_V2Wð'Ic9{s1Ɨe/2ʗ2wU䶟ՁO@hGɌBn4F!t1NFApp:"Yq=/e_'o20OkΥrfeΕEZd[~?E,l%brkvrՒG\0l~IZ8r8IorK/ie#HtգٌC1BlBy2dQ4 h.ee:h+<8qYG` PP:e,5(t69 2J*(ժ,1"}7fV*إP>Ž$T=#QCVNw&XR( d6qJd\YᾓHBkψL2mn[Xh!Qjf02їv҂pdKSlDPڦL%hጏGd-]x.50E;;͸[>Oj3"cEWd."]d8hPeH;ͨKrQ9b!1Ӄwa"LÚ{*oGNE b8.Ǡ緟o6F-AH *sY$r͋yZl.m̓o`H")o,ʼn*8S HI}і4NyǘGJzn- &0β"MK*MUDHuV\c 0inrԄFcQ²[hsZKcNs$^AI|raAtm@uBQΠ XTNhty],ᄒyyH ڍ+97E/`~iu^;hڢ[ cFS釣$(ntG"6uh9ߖ^Y8]?/!+a.3t C|7:۸|7&YOvȅn|Y6S`0i93 SǺ\4y?f?\~-z|M/WL+z!g-1[Y #.{b)y2(iBVB&e%:I3Hܶ[JJ]jTΔA0Sپd SȕZ0  4}.2kW~aDʼ(lR쿩u --Ӓ5 pP+Z͞ ZZngvX}VY)]HnMTR xC~HVp9@N,%-was\-{H6"ߘ{,?PrgEv<ٞLSeL<)lx\'4oeZ<;iN~#_z&AOf]@Ug.J=) }@o}a߼aFZ6DIX u?V29'5r)x ݕH.d^eӼ*0B0Ze_ϣN XS>T׏*_zwrZF= ֨$Q3KY#6,A aKmm$UOل2RjB90Ș eHmBBqF`J3]ӯoD}GV}>7lq2W=R_6ktslW=@<(onnD)0J+A(yBQTpJEU7;%Jzm8Q*KVZ7N(=FGi(ֲ8P RP꟫|neA|ueT[[LQlG9%G#%~Q% y{Կq}~)ώÑ T9Hn/乇 Сήy4NQ#7" jNa} YbP油y,몾~i.VbbҔRP:Oǚp/'F)70h__HKXm|xi+GWu$hXM'_G&\}5GߺF8Cw|:)̝qh^x4Hmm@+DmʑtȏrJbX 1 6PU[ï ,ZW5pA Z A'Ǐk8 s_1 !eZ"UkKv.떑@QkdTnZc m$v 3nZ=Z -[)Z֠MR7܄jL.C#66l쁟eiylh ϭYkp߇kݩ01׷&}-'_iڛ"޴<a2v I{@!њh#MM$xH#JM#>:PlY.50`J?XDe$Qȅ!@ 31[y0ߐhPVǝ@Md4hg:^$Yf8v^جf}]Y&OԎ,se?cٷ?X-hlotwOOb AQQtDprd2Nk_>XSM>g;*ۈ'_#&D"kKi*"y*"~11zK}ey$ ê[j gR2ғj4;#Cj& q92vŖ:>I͝ Bs1bF ]Բ.4I:pͲ)#M,bc:Hn3U.Uc{H6pͲe叼՝[.16a2_ ݆n]2}s~Ml&K;k~,= 'ҍS {RRPMΣPeuqBCMMS /p(?Ի\~iB6]z}J`Kŧ73o~l>{]+sVۻ`equ>_?5ڏMK/u׿էvU~s|;û7g=3p|N؟۫͝y3jw:J`jq2 xOs>SjVV2 ^CjB7m+(cHui@PzKG]ei e3jbݍi7ۑ]9>*\ZpK3%NVld^+u  VlAQq8Dxd R&#:lZRx VhtqJmPA -EI"eYHk]7M&v M22FZYsY!k' Pذ#W_PodoAs&8N㍹rJI ]%1o'*rpTsFt mI18ڍC6H;7\@ /UZIA+cF+*Cu.E> WH4ݏ2`FڠR<#-*.JEgLԅ=r(E-z*BcHqd}bzj}khO #/q^0vJF"P):J1Kܤ15swmevJb)1s@-tu-;QƖEŪr>,tкlq5**sߦxx0F\E){F#T\RW$eAd<ʢJj*WBj%m[h{B`%tfcFN3v\rNuS^7pvwzw۟M[^m)u{wbH1Nnv!W~m[5(Ԕ)\qXY)_j.KB[h~W@X{X{6NP=؃e* MQٶ.YYkLM sPSa {\-(}nx[e/߮)wՊ"˝}"oclb˷> ܚBKq;Ed_'ke< Ʊ;C>|Ց+vVObOVS!A%n2Dn@^kbd$#C눑x̣|:r颽%2Q?1~ La3=QZf ~M5=yǏD39 syn{{ݡiuO á1ܰ=m"39ۈKmCywq 90 %{e@hI)3]oo9+P1j|oێ-i UwXX!uŸ凳ah7 \<+b8rER,im `JŪ)mAi5|YVZՅ2J,"ya؃su2TVb r۸ kZ6PF-u/΄t7j\ގF_5?YG%.+V[E:M A8a ^ICwzF g9g10;AKd#kSvR8t*g t3b3E˭@4`ʳwq{-m~ b|U]eF7UfD=]cSN|pN!H3O9$Rv\ $>pHDԳCtsDD'E!%emXαS<D0p$}eƌ\ ƤC~#%cNt7v~~܋B^vmʾ `3pzlؘ7|O?laLvar/home/core/zuul-output/logs/kubelet.log0000644000000000000000004206165515134222122017700 0ustar rootrootJan 21 17:56:04 crc systemd[1]: Starting Kubernetes Kubelet... 
Jan 21 17:56:05 crc restorecon[4698]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 21 17:56:05 crc restorecon[4698]:
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c968,c969 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 
21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:56:05 
crc restorecon[4698]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:56:05 
crc restorecon[4698]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized
by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c377,c642 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 
17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:56:05 crc 
restorecon[4698]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 
17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 
17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc 
restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:56:05 crc restorecon[4698]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:56:05 crc restorecon[4698]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:56:05 crc restorecon[4698]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
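The block above is a single restorecon sweep: every "not reset as customized by admin" entry names a file under /var/lib/kubelet whose SELinux context was left in place rather than reset, apparently because container_file_t is a customizable type. When auditing a capture like this, a short script can condense the sweep into totals; the sketch below assumes the log was saved as kubelet.log (a hypothetical filename, not named anywhere in the log) and that the entry layout matches the lines above.

import re
from collections import Counter

# Sketch only: summarize restorecon "not reset" entries from a saved copy of this log.
# "kubelet.log" is an assumed filename; the field layout is inferred from the entries above.
ENTRY = re.compile(r"restorecon\[\d+\]: (?P<path>/\S+) not reset as customized by admin to (?P<ctx>\S+)")

by_context = Counter()
by_pod = Counter()
with open("kubelet.log", encoding="utf-8", errors="replace") as fh:
    for line in fh:
        m = ENTRY.search(line)
        if not m:
            continue
        by_context[m.group("ctx")] += 1
        parts = m.group("path").split("/")
        # /var/lib/kubelet/pods/<uid>/... -> the component after "pods" is the pod UID
        if "pods" in parts and parts.index("pods") + 1 < len(parts):
            by_pod[parts[parts.index("pods") + 1]] += 1

for ctx, n in by_context.most_common():
    print(f"{n:5d} files kept context {ctx}")
for pod, n in by_pod.most_common(5):
    print(f"{n:5d} entries under pod {pod}")

Counting by context and by pod UID makes it easy to spot which workloads account for the bulk of the relabel output before reading further.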
Jan 21 17:56:06 crc kubenswrapper[4792]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Jan 21 17:56:06 crc kubenswrapper[4792]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Jan 21 17:56:06 crc kubenswrapper[4792]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Jan 21 17:56:06 crc kubenswrapper[4792]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Jan 21 17:56:06 crc kubenswrapper[4792]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Jan 21 17:56:06 crc kubenswrapper[4792]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.110660 4792 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113428 4792 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113449 4792 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113458 4792 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113466 4792 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113470 4792 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113475 4792 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113479 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113486 4792 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113492 4792 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113498 4792 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113502 4792 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113507 4792 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113511 4792 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113517 4792 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113532 4792 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113537 4792 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113543 4792 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113548 4792 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113553 4792 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113557 4792 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113562 4792 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113567 4792 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113571 4792 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113576 4792 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113580 4792 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113585 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113589 4792 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113593 4792 feature_gate.go:330] unrecognized feature gate: SignatureStores
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113598 4792 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113603 4792 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113608 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113613 4792 feature_gate.go:330] unrecognized feature gate: NewOLM
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113617 4792 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113622 4792 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113628 4792 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113635 4792 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113641 4792 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113645 4792 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113650 4792 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113655 4792 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113660 4792 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113665 4792 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113669 4792 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113673 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113678 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113682 4792 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113687 4792 feature_gate.go:330] unrecognized feature gate: PinnedImages
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113692 4792 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113696 4792 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113700 4792 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113706 4792 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113711 4792 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113715 4792 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113719 4792 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113726 4792 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113731 4792 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113736 4792 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113741 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113745 4792 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113750 4792 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113754 4792 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113758 4792 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113763 4792 feature_gate.go:330] unrecognized feature gate: Example
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113767 4792 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113772 4792 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113776 4792 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113781 4792 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113786 4792 feature_gate.go:330] unrecognized feature gate: OVNObservability
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113791 4792 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113796 4792 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.113800 4792 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
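Each feature_gate.go:330 warning above is the kubelet rejecting a gate name it does not know; the names (GatewayAPI, NewOLM, InsightsConfig, and so on) look like OpenShift cluster-level gates handed down in the kubelet configuration, so the warnings read as expected startup noise rather than misconfiguration. A sketch for deduplicating them, under the same hypothetical kubelet.log assumption as above:

import re

# Sketch only: list each distinct gate the kubelet rejected.
GATE = re.compile(r"unrecognized feature gate: ([A-Za-z0-9]+)")

gates = set()
with open("kubelet.log", encoding="utf-8", errors="replace") as fh:
    for line in fh:
        m = GATE.search(line)
        if m:
            gates.add(m.group(1))

print(f"{len(gates)} distinct unrecognized gates")
for name in sorted(gates):
    print("  " + name)

Deduplicating matters here because, as the second warning block further down shows, the same gate list is evaluated more than once during startup.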
--bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114392 4792 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114398 4792 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114403 4792 flags.go:64] FLAG: --cgroup-root="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114408 4792 flags.go:64] FLAG: --cgroups-per-qos="true" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114416 4792 flags.go:64] FLAG: --client-ca-file="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114421 4792 flags.go:64] FLAG: --cloud-config="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114426 4792 flags.go:64] FLAG: --cloud-provider="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114432 4792 flags.go:64] FLAG: --cluster-dns="[]" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114439 4792 flags.go:64] FLAG: --cluster-domain="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114445 4792 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114451 4792 flags.go:64] FLAG: --config-dir="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114456 4792 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114463 4792 flags.go:64] FLAG: --container-log-max-files="5" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114471 4792 flags.go:64] FLAG: --container-log-max-size="10Mi" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114477 4792 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114482 4792 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114488 4792 flags.go:64] FLAG: --containerd-namespace="k8s.io" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114493 4792 flags.go:64] FLAG: --contention-profiling="false" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114498 4792 flags.go:64] FLAG: --cpu-cfs-quota="true" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114504 4792 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114511 4792 flags.go:64] FLAG: --cpu-manager-policy="none" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114516 4792 flags.go:64] FLAG: --cpu-manager-policy-options="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114523 4792 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114529 4792 flags.go:64] FLAG: --enable-controller-attach-detach="true" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114534 4792 flags.go:64] FLAG: --enable-debugging-handlers="true" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114539 4792 flags.go:64] FLAG: --enable-load-reader="false" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114545 4792 flags.go:64] FLAG: --enable-server="true" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114550 4792 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114557 4792 flags.go:64] FLAG: --event-burst="100" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114562 4792 flags.go:64] FLAG: --event-qps="50" 
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114568 4792 flags.go:64] FLAG: --event-storage-age-limit="default=0" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114574 4792 flags.go:64] FLAG: --event-storage-event-limit="default=0" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114579 4792 flags.go:64] FLAG: --eviction-hard="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114585 4792 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114592 4792 flags.go:64] FLAG: --eviction-minimum-reclaim="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114597 4792 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114603 4792 flags.go:64] FLAG: --eviction-soft="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114608 4792 flags.go:64] FLAG: --eviction-soft-grace-period="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114613 4792 flags.go:64] FLAG: --exit-on-lock-contention="false" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114619 4792 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114625 4792 flags.go:64] FLAG: --experimental-mounter-path="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114630 4792 flags.go:64] FLAG: --fail-cgroupv1="false" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114636 4792 flags.go:64] FLAG: --fail-swap-on="true" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114641 4792 flags.go:64] FLAG: --feature-gates="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114649 4792 flags.go:64] FLAG: --file-check-frequency="20s" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114654 4792 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114660 4792 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114666 4792 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114672 4792 flags.go:64] FLAG: --healthz-port="10248" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114678 4792 flags.go:64] FLAG: --help="false" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114683 4792 flags.go:64] FLAG: --hostname-override="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114689 4792 flags.go:64] FLAG: --housekeeping-interval="10s" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114696 4792 flags.go:64] FLAG: --http-check-frequency="20s" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114701 4792 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114706 4792 flags.go:64] FLAG: --image-credential-provider-config="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114711 4792 flags.go:64] FLAG: --image-gc-high-threshold="85" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114717 4792 flags.go:64] FLAG: --image-gc-low-threshold="80" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114723 4792 flags.go:64] FLAG: --image-service-endpoint="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114728 4792 flags.go:64] FLAG: --kernel-memcg-notification="false" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114733 4792 flags.go:64] FLAG: --kube-api-burst="100" Jan 21 17:56:06 crc 
kubenswrapper[4792]: I0121 17:56:06.114739 4792 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114744 4792 flags.go:64] FLAG: --kube-api-qps="50" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114750 4792 flags.go:64] FLAG: --kube-reserved="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114756 4792 flags.go:64] FLAG: --kube-reserved-cgroup="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114761 4792 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114767 4792 flags.go:64] FLAG: --kubelet-cgroups="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114772 4792 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114778 4792 flags.go:64] FLAG: --lock-file="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114783 4792 flags.go:64] FLAG: --log-cadvisor-usage="false" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114788 4792 flags.go:64] FLAG: --log-flush-frequency="5s" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114794 4792 flags.go:64] FLAG: --log-json-info-buffer-size="0" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114803 4792 flags.go:64] FLAG: --log-json-split-stream="false" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114808 4792 flags.go:64] FLAG: --log-text-info-buffer-size="0" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114814 4792 flags.go:64] FLAG: --log-text-split-stream="false" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114820 4792 flags.go:64] FLAG: --logging-format="text" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114825 4792 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114832 4792 flags.go:64] FLAG: --make-iptables-util-chains="true" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114838 4792 flags.go:64] FLAG: --manifest-url="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114843 4792 flags.go:64] FLAG: --manifest-url-header="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114867 4792 flags.go:64] FLAG: --max-housekeeping-interval="15s" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114873 4792 flags.go:64] FLAG: --max-open-files="1000000" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114880 4792 flags.go:64] FLAG: --max-pods="110" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114885 4792 flags.go:64] FLAG: --maximum-dead-containers="-1" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114891 4792 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114897 4792 flags.go:64] FLAG: --memory-manager-policy="None" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114902 4792 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114907 4792 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114912 4792 flags.go:64] FLAG: --node-ip="192.168.126.11" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114917 4792 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 
17:56:06.114930 4792 flags.go:64] FLAG: --node-status-max-images="50" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114935 4792 flags.go:64] FLAG: --node-status-update-frequency="10s" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114940 4792 flags.go:64] FLAG: --oom-score-adj="-999" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114945 4792 flags.go:64] FLAG: --pod-cidr="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114949 4792 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114958 4792 flags.go:64] FLAG: --pod-manifest-path="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114963 4792 flags.go:64] FLAG: --pod-max-pids="-1" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114968 4792 flags.go:64] FLAG: --pods-per-core="0" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114973 4792 flags.go:64] FLAG: --port="10250" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114979 4792 flags.go:64] FLAG: --protect-kernel-defaults="false" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114984 4792 flags.go:64] FLAG: --provider-id="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114989 4792 flags.go:64] FLAG: --qos-reserved="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.114994 4792 flags.go:64] FLAG: --read-only-port="10255" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115000 4792 flags.go:64] FLAG: --register-node="true" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115005 4792 flags.go:64] FLAG: --register-schedulable="true" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115010 4792 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115020 4792 flags.go:64] FLAG: --registry-burst="10" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115025 4792 flags.go:64] FLAG: --registry-qps="5" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115031 4792 flags.go:64] FLAG: --reserved-cpus="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115037 4792 flags.go:64] FLAG: --reserved-memory="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115044 4792 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115049 4792 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115054 4792 flags.go:64] FLAG: --rotate-certificates="false" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115061 4792 flags.go:64] FLAG: --rotate-server-certificates="false" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115066 4792 flags.go:64] FLAG: --runonce="false" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115071 4792 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115076 4792 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115082 4792 flags.go:64] FLAG: --seccomp-default="false" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115087 4792 flags.go:64] FLAG: --serialize-image-pulls="true" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115092 4792 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Jan 21 17:56:06 crc 
kubenswrapper[4792]: I0121 17:56:06.115098 4792 flags.go:64] FLAG: --storage-driver-db="cadvisor" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115103 4792 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115108 4792 flags.go:64] FLAG: --storage-driver-password="root" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115113 4792 flags.go:64] FLAG: --storage-driver-secure="false" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115119 4792 flags.go:64] FLAG: --storage-driver-table="stats" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115124 4792 flags.go:64] FLAG: --storage-driver-user="root" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115138 4792 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115144 4792 flags.go:64] FLAG: --sync-frequency="1m0s" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115150 4792 flags.go:64] FLAG: --system-cgroups="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115154 4792 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115163 4792 flags.go:64] FLAG: --system-reserved-cgroup="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115169 4792 flags.go:64] FLAG: --tls-cert-file="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115174 4792 flags.go:64] FLAG: --tls-cipher-suites="[]" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115182 4792 flags.go:64] FLAG: --tls-min-version="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115187 4792 flags.go:64] FLAG: --tls-private-key-file="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115192 4792 flags.go:64] FLAG: --topology-manager-policy="none" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115197 4792 flags.go:64] FLAG: --topology-manager-policy-options="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115203 4792 flags.go:64] FLAG: --topology-manager-scope="container" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115208 4792 flags.go:64] FLAG: --v="2" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115216 4792 flags.go:64] FLAG: --version="false" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115223 4792 flags.go:64] FLAG: --vmodule="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115231 4792 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115237 4792 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115362 4792 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115369 4792 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115376 4792 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
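The run of FLAG: --name="value" entries above is the kubelet's startup flag dump from flags.go:64: every registered flag is logged with its effective value before anything acts on it. The feature-gate warnings that begin here continue below and are covered after they finish. A minimal sketch of the same dump pattern, using Go's standard flag package rather than the spf13/pflag stack the real kubelet builds on (the two flag names are illustrative):

package main

import (
	"flag"
	"log"
)

func main() {
	// Illustrative flags; the real kubelet registers hundreds of these.
	flag.String("kubeconfig", "/var/lib/kubelet/kubeconfig", "path to the kubeconfig file")
	flag.Int("max-pods", 110, "maximum number of pods per node")
	flag.Parse()

	// Mirror the startup dump seen in the log: one FLAG: line per
	// registered flag, printed with the value in effect after parsing.
	flag.VisitAll(func(f *flag.Flag) {
		log.Printf("FLAG: --%s=%q", f.Name, f.Value.String())
	})
}

Running it prints one FLAG: line per registered flag, defaults included, which is why the dump above is so long.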
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115383 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115388 4792 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115394 4792 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115400 4792 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115405 4792 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115410 4792 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115414 4792 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115419 4792 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115423 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115428 4792 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115432 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115437 4792 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115441 4792 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115446 4792 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115450 4792 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115455 4792 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115459 4792 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115464 4792 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115468 4792 feature_gate.go:330] unrecognized feature gate: Example Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115474 4792 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115481 4792 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115486 4792 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115491 4792 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115496 4792 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115500 4792 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115505 4792 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115510 4792 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115515 4792 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115524 4792 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115528 4792 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115532 4792 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115537 4792 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115542 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115546 4792 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115550 4792 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115555 4792 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115559 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115564 4792 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115568 4792 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115574 4792 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115578 4792 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115583 4792 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115588 4792 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115595 4792 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115601 4792 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115606 4792 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115611 4792 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115616 4792 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115621 4792 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115626 4792 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115631 4792 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115637 4792 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115642 4792 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115647 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115656 4792 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115660 4792 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115665 4792 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115670 4792 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115674 4792 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115679 4792 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115685 4792 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115690 4792 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115695 4792 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115700 4792 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115704 4792 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115708 4792 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115712 4792 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.115717 4792 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.115731 4792 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false 
EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.122449 4792 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.122490 4792 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
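Every "unrecognized feature gate" warning above is an OpenShift cluster-level gate name being offered to the kubelet's upstream gate parser, which only accepts gates Kubernetes itself knows; accepted gates that are already GA or deprecated (CloudDualStackNodeIPs, DisableKubeletCloudCredentialProviders, ValidatingAdmissionPolicy, KMSv1) draw the "will be removed in a future release" warnings, and whatever survives is the map printed on the "feature gates:" summary line. The gate set is applied once per configuration source, which is why identical warning passes repeat within a millisecond of each other in the raw journal. A rough, hypothetical sketch of that filtering; the known table here is deliberately tiny and is not the real feature_gate.go:

package main

import "fmt"

// Tiny stand-in for the known-gate table. "ga" marks gates that are
// accepted but warn when set explicitly.
var known = map[string]struct{ ga bool }{
	"CloudDualStackNodeIPs":     {ga: true},
	"ValidatingAdmissionPolicy": {ga: true},
	"NodeSwap":                  {},
}

func apply(requested map[string]bool) map[string]bool {
	gates := map[string]bool{}
	for name, enabled := range requested {
		info, ok := known[name]
		if !ok {
			fmt.Printf("W unrecognized feature gate: %s\n", name)
			continue // dropped; warned once per pass over the config
		}
		if info.ga {
			fmt.Printf("W Setting GA feature gate %s=%v. It will be removed in a future release.\n", name, enabled)
		}
		gates[name] = enabled
	}
	return gates
}

func main() {
	fmt.Println("feature gates:", apply(map[string]bool{
		"CloudDualStackNodeIPs": true, // accepted, GA warning
		"MultiArchInstallAWS":   true, // OpenShift-only, unrecognized here
	}))
}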
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.123867 4792 server.go:940] "Client rotation is on, will bootstrap in background" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.129231 4792 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.129335 4792 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.129897 4792 server.go:997] "Starting client certificate rotation" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.129922 4792 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.130358 4792 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-21 19:39:38.521059204 +0000 UTC Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.130493 4792 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.134887 4792 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.136242 4792 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 21 17:56:06 crc kubenswrapper[4792]: E0121 17:56:06.136265 4792 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.129.56.68:6443: connect: connection refused" logger="UnhandledError" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.142321 4792 log.go:25] "Validated CRI v1 runtime API" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.169113 4792 log.go:25] "Validated CRI v1 image API" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.170953 4792 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.173419 4792 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-01-21-17-50-30-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.173450 4792 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24
fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:41 fsType:tmpfs blockSize:0}] Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.188221 4792 manager.go:217] Machine: {Timestamp:2026-01-21 17:56:06.18583077 +0000 UTC m=+0.167793976 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:d27e7875-2a6d-4c63-ab2a-7dede22b0172 BootID:24361cbe-9975-4a7d-97f7-cc9f17426792 Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:41 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:dc:04:08 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:dc:04:08 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:27:71:da Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:89:f7:f8 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:75:81:2e Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:37:cc:95 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:fa:ba:48:2e:4d:f9 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:ee:17:79:cc:2e:e3 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 
Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.188462 4792 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. 
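Back above the machine inventory, the certificate manager logged an expiration of 2026-02-24 05:52:08 UTC against a rotation deadline of 2025-11-21, already in the past at startup, hence the immediate "Rotating certificates". The deadline is chosen at a jittered point most of the way through the certificate's validity window. The sketch below assumes a one-year certificate and a 70 to 85 percent jitter window; both are assumptions, since only the expiry appears in the log and the exact jitter factors are not copied from client-go:

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// nextRotationDeadline loosely mimics how the certificate manager picks
// the rotation point: a jittered moment roughly 70-85% of the way
// through the validity window. Jitter bounds are an assumption.
func nextRotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(total) * (0.7 + 0.15*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	// notBefore is assumed (one-year lifetime); notAfter is the expiry
	// printed in the log above.
	notBefore := time.Date(2025, 2, 24, 5, 52, 8, 0, time.UTC)
	notAfter := time.Date(2026, 2, 24, 5, 52, 8, 0, time.UTC)
	deadline := nextRotationDeadline(notBefore, notAfter)
	fmt.Println("rotation deadline:", deadline)
	// A deadline already in the past means rotate immediately, which is
	// what the "Rotating certificates" line reflects.
	fmt.Println("rotate immediately:", time.Now().After(deadline))
}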
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.188602 4792 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.189020 4792 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.189198 4792 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.189225 4792 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.189403 4792 topology_manager.go:138] "Creating topology manager with none policy" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.189412 4792 container_manager_linux.go:303] "Creating device plugin manager" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.189654 4792 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.189689 4792 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.189859 4792 state_mem.go:36] "Initialized new in-memory state store" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.189931 4792 server.go:1245] "Using root directory" path="/var/lib/kubelet" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.190667 4792 kubelet.go:418] "Attempting to sync node with API server" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.190694 4792 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" 
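The NodeConfig above carries the inputs for the standard node-allocatable arithmetic: capacity minus kube-reserved, minus system-reserved, minus the hard eviction threshold. A small worked sketch with the numbers from this log (MemoryCapacity from the Machine line, SystemReserved memory=350Mi, KubeReserved null, and the memory.available 100Mi hard threshold):

package main

import "fmt"

const Mi = 1024 * 1024

func main() {
	// Figures taken from the Machine and NodeConfig lines above.
	capacity := int64(33654124544)  // MemoryCapacity in bytes
	systemReserved := int64(350 * Mi)
	kubeReserved := int64(0)        // KubeReserved is null in this config
	evictionHard := int64(100 * Mi) // memory.available hard threshold

	// allocatable = capacity - kube-reserved - system-reserved - hard eviction
	allocatable := capacity - kubeReserved - systemReserved - evictionHard
	fmt.Printf("allocatable memory: %d bytes (%.2f GiB)\n",
		allocatable, float64(allocatable)/(1<<30))
}

This is the figure the node later advertises as allocatable memory, the pool the scheduler actually gets to place pods against.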
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.190715 4792 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.190726 4792 kubelet.go:324] "Adding apiserver pod source" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.190736 4792 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.192398 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.68:6443: connect: connection refused Jan 21 17:56:06 crc kubenswrapper[4792]: E0121 17:56:06.192479 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.68:6443: connect: connection refused" logger="UnhandledError" Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.192469 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.68:6443: connect: connection refused Jan 21 17:56:06 crc kubenswrapper[4792]: E0121 17:56:06.192589 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.68:6443: connect: connection refused" logger="UnhandledError" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.192665 4792 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.192976 4792 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
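The failed reflector calls above are the initial List a client-go informer issues before it starts watching; with the API server unreachable, they surface as "connection refused" and the reflector simply retries with backoff. A sketch of the equivalent List for the node informer, reusing the fieldSelector and limit visible in the failing URLs (assumes client-go is on the module path and the kubeconfig path from the --kubeconfig flag above):

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Same kubeconfig the kubelet loads at startup.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/kubelet/kubeconfig")
	if err != nil {
		panic(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// The initial list a reflector issues before watching; note the same
	// fieldSelector and limit that appear in the failing URLs above.
	nodes, err := client.CoreV1().Nodes().List(context.TODO(), metav1.ListOptions{
		FieldSelector: "metadata.name=crc",
		Limit:         500,
	})
	if err != nil {
		// With the API server down this is the "connection refused"
		// error in the log; the reflector retries with backoff.
		fmt.Println("list failed:", err)
		return
	}
	fmt.Println("nodes:", len(nodes.Items))
}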
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.193978 4792 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.194441 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.194463 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.194469 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.194476 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.194487 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.194493 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.194501 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.194513 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.194521 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.194528 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.194538 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.194545 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.194944 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.195338 4792 server.go:1280] "Started kubelet" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.196003 4792 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.68:6443: connect: connection refused Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.196167 4792 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.197135 4792 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Jan 21 17:56:06 crc systemd[1]: Started Kubernetes Kubelet. 
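The podresources endpoint above is brought up with qps=100 and burstTokens=10. The kubelet enforces this through a gRPC interceptor; the sketch below shows only the underlying token bucket, using golang.org/x/time/rate with the same parameters:

package main

import (
	"fmt"

	"golang.org/x/time/rate"
)

func main() {
	// qps=100, burstTokens=10, as in the podresources line above.
	limiter := rate.NewLimiter(rate.Limit(100), 10)

	// A handler would consult Allow() per call and reject when the
	// bucket is empty; the kubelet wires this up as a gRPC interceptor,
	// this sketch exercises only the bucket itself.
	granted := 0
	for i := 0; i < 50; i++ {
		if limiter.Allow() {
			granted++
		}
	}
	fmt.Printf("granted %d of 50 back-to-back requests\n", granted)
}

In a tight loop only roughly the burst size is granted immediately; the rest must wait for the 100-per-second refill.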
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.197592 4792 server.go:460] "Adding debug handlers to kubelet server" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.198469 4792 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.198664 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.198692 4792 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.198771 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 16:07:22.049349393 +0000 UTC Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.198879 4792 volume_manager.go:287] "The desired_state_of_world populator starts" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.198891 4792 volume_manager.go:289] "Starting Kubelet Volume Manager" Jan 21 17:56:06 crc kubenswrapper[4792]: E0121 17:56:06.198891 4792 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.198974 4792 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Jan 21 17:56:06 crc kubenswrapper[4792]: E0121 17:56:06.199573 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" interval="200ms" Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.199788 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.68:6443: connect: connection refused Jan 21 17:56:06 crc kubenswrapper[4792]: E0121 17:56:06.199932 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.68:6443: connect: connection refused" logger="UnhandledError" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.200366 4792 factory.go:55] Registering systemd factory Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.200385 4792 factory.go:221] Registration of the systemd container factory successfully Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.200738 4792 factory.go:153] Registering CRI-O factory Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.200757 4792 factory.go:221] Registration of the crio container factory successfully Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.200831 4792 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.200891 4792 factory.go:103] Registering Raw factory Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.200907 4792 manager.go:1196] Started watching for new ooms in manager Jan 21 17:56:06 crc kubenswrapper[4792]: 
E0121 17:56:06.200801 4792 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.129.56.68:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188cd0a64548d628 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-21 17:56:06.195312168 +0000 UTC m=+0.177275354,LastTimestamp:2026-01-21 17:56:06.195312168 +0000 UTC m=+0.177275354,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.201670 4792 manager.go:319] Starting recovery of all containers Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217188 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217241 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217259 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217274 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217288 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217300 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217311 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217322 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217337 4792 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217349 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217361 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217372 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217384 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217398 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217410 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217422 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217433 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217445 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217454 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217465 4792 reconstruct.go:130] "Volume is marked as 
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217465 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217478 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217492 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217504 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217514 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217526 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217537 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217551 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217563 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217574 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217586 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217642 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217657 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217670 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217680 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217691 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217701 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217712 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217722 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217734 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217748 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217780 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217792 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217805 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217829 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217842 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217868 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217879 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217891 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217903 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217916 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217935 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217952 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217970 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217982 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.217994 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218006 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218019 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218030 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218042 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218055 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218067 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218079 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218092 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218103 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218116 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218129 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218144 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218156 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218169 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218181 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218194 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218205 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218218 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218230 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" 
volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218245 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218257 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218270 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218282 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218344 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218356 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218369 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218381 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218395 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218407 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218419 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218430 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218443 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218460 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218472 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218483 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218499 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218510 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218522 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218532 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218544 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218554 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" 
volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218566 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218577 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218591 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218603 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218615 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218631 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218642 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218658 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218671 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218683 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218696 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218708 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218720 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218731 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218743 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218758 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218770 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218782 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218795 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218806 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218817 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218828 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" 
volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218840 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218865 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218876 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218887 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218899 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218917 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218931 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218943 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218954 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218965 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218975 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" 
volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218986 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.218997 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219010 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219021 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219035 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219045 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219057 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219069 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219079 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219094 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219106 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219118 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219131 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219143 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219156 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219170 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219180 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219194 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219206 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219217 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219228 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219240 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219260 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219270 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219281 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219292 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219303 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219314 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219326 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219337 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219348 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219359 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219370 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219384 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219395 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219406 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219419 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219431 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219442 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219454 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219465 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219476 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219487 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219499 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219510 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219521 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219532 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219545 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219556 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219569 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.219579 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.220728 4792 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.220752 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.220765 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.220776 4792 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.220787 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.220797 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.220807 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.220817 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.220827 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.220838 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.220869 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.220881 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.220892 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.220904 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.220979 4792 reconstruct.go:130] "Volume is marked as uncertain and 
added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.220992 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.221003 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.221015 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.221027 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.221039 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.221051 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.221064 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.221075 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.221086 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.221139 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.221154 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.221167 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.221178 4792 reconstruct.go:97] "Volume reconstruction finished" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.221185 4792 reconciler.go:26] "Reconciler: start to sync state" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.232599 4792 manager.go:324] Recovery completed Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.241819 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.243003 4792 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.243599 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.243918 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.244027 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.244751 4792 cpu_manager.go:225] "Starting CPU manager" policy="none" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.244765 4792 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.244781 4792 state_mem.go:36] "Initialized new in-memory state store" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.245287 4792 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.245336 4792 status_manager.go:217] "Starting to sync pod status with apiserver" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.245362 4792 kubelet.go:2335] "Starting kubelet main sync loop" Jan 21 17:56:06 crc kubenswrapper[4792]: E0121 17:56:06.245489 4792 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.246265 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.68:6443: connect: connection refused Jan 21 17:56:06 crc kubenswrapper[4792]: E0121 17:56:06.246398 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.68:6443: connect: connection refused" logger="UnhandledError" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.258610 4792 policy_none.go:49] "None policy: Start" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.260106 4792 memory_manager.go:170] "Starting memorymanager" policy="None" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.260145 4792 state_mem.go:35] "Initializing new in-memory state store" Jan 21 17:56:06 crc kubenswrapper[4792]: E0121 17:56:06.299316 4792 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.316095 4792 manager.go:334] "Starting Device Plugin manager" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.316144 4792 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.316163 4792 server.go:79] "Starting device plugin registration server" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.316571 4792 eviction_manager.go:189] "Eviction manager: starting control loop" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.316588 4792 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.316726 4792 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.316839 4792 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.316865 4792 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Jan 21 17:56:06 crc kubenswrapper[4792]: E0121 17:56:06.325056 4792 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.346405 4792 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 21 17:56:06 crc kubenswrapper[4792]: 
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.346799 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.348154 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.348193 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.348202 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.348424 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.348628 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.348695 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.350207 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.350235 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.350261 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.350691 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.350741 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.350754 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.351656 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.352581 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.352744 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.355352 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.355400 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.355412 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.355704 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.355867 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.355708 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.356145 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.356220 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.356338 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.357466 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.357532 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.357548 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.357674 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.357785 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.357807 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.357828 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.357883 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.357913 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.358925 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.358954 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.358965 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.359091 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.359116 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.359135 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.359198 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.359226 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.359884 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.359909 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.359920 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:06 crc kubenswrapper[4792]: E0121 17:56:06.400253 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" interval="400ms" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.416709 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.417690 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.417744 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.417755 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.417788 4792 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 21 17:56:06 crc kubenswrapper[4792]: E0121 17:56:06.418413 4792 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.68:6443: connect: 
connection refused" node="crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.422465 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.422512 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.422538 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.422558 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.422611 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.422638 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.422673 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.422836 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.422920 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.422966 4792 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.423020 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.423072 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.423098 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.423119 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.423135 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524040 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524096 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524116 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524136 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524149 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524161 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524174 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524187 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524204 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524217 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524233 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524253 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524271 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524291 4792 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524310 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524348 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524392 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524451 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524481 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524484 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524376 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524516 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524545 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524557 4792 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524525 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524614 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524600 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524630 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524639 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.524654 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.619259 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.620702 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.620731 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.620741 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.620768 4792 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 21 17:56:06 crc kubenswrapper[4792]: E0121 17:56:06.621195 4792 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.68:6443: connect: 
connection refused" node="crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.673626 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.688838 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.701364 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.717665 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: I0121 17:56:06.723083 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 21 17:56:06 crc kubenswrapper[4792]: E0121 17:56:06.802017 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" interval="800ms" Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.943232 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-80dfbf6a4ee235d3d0774ca5b62779a6c2468293551ffc78cb5c5219c2a770c6 WatchSource:0}: Error finding container 80dfbf6a4ee235d3d0774ca5b62779a6c2468293551ffc78cb5c5219c2a770c6: Status 404 returned error can't find the container with id 80dfbf6a4ee235d3d0774ca5b62779a6c2468293551ffc78cb5c5219c2a770c6 Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.944039 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-f4005d09501fc495240993d1765b6c8c94ab14abf2b800843d2f43d54ca65055 WatchSource:0}: Error finding container f4005d09501fc495240993d1765b6c8c94ab14abf2b800843d2f43d54ca65055: Status 404 returned error can't find the container with id f4005d09501fc495240993d1765b6c8c94ab14abf2b800843d2f43d54ca65055 Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.947023 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-2f01a8c744abdcc9bc4b7c708956f23f7c9b06eaa1e18d9e3099be41eacfc9b6 WatchSource:0}: Error finding container 2f01a8c744abdcc9bc4b7c708956f23f7c9b06eaa1e18d9e3099be41eacfc9b6: Status 404 returned error can't find the container with id 2f01a8c744abdcc9bc4b7c708956f23f7c9b06eaa1e18d9e3099be41eacfc9b6 Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.948008 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-360f76879aac4717dc10e679de691f51869c20aa16dafa54f137263a40e1bec5 WatchSource:0}: Error finding container 360f76879aac4717dc10e679de691f51869c20aa16dafa54f137263a40e1bec5: Status 404 returned error can't find the container with id 360f76879aac4717dc10e679de691f51869c20aa16dafa54f137263a40e1bec5 Jan 21 17:56:06 crc kubenswrapper[4792]: W0121 17:56:06.950526 4792 manager.go:1169] 
Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-f408adf1e9aa41d220e4a867c0e5e26c9c8617ce182cff8f8fb6643fa38659a8 WatchSource:0}: Error finding container f408adf1e9aa41d220e4a867c0e5e26c9c8617ce182cff8f8fb6643fa38659a8: Status 404 returned error can't find the container with id f408adf1e9aa41d220e4a867c0e5e26c9c8617ce182cff8f8fb6643fa38659a8 Jan 21 17:56:07 crc kubenswrapper[4792]: I0121 17:56:07.021407 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:56:07 crc kubenswrapper[4792]: I0121 17:56:07.022627 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:07 crc kubenswrapper[4792]: I0121 17:56:07.022658 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:07 crc kubenswrapper[4792]: I0121 17:56:07.022668 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:07 crc kubenswrapper[4792]: I0121 17:56:07.022689 4792 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 21 17:56:07 crc kubenswrapper[4792]: E0121 17:56:07.023100 4792 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.68:6443: connect: connection refused" node="crc" Jan 21 17:56:07 crc kubenswrapper[4792]: I0121 17:56:07.196921 4792 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.68:6443: connect: connection refused Jan 21 17:56:07 crc kubenswrapper[4792]: I0121 17:56:07.199042 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 13:02:50.452592824 +0000 UTC Jan 21 17:56:07 crc kubenswrapper[4792]: I0121 17:56:07.252484 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"80dfbf6a4ee235d3d0774ca5b62779a6c2468293551ffc78cb5c5219c2a770c6"} Jan 21 17:56:07 crc kubenswrapper[4792]: I0121 17:56:07.253623 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"2f01a8c744abdcc9bc4b7c708956f23f7c9b06eaa1e18d9e3099be41eacfc9b6"} Jan 21 17:56:07 crc kubenswrapper[4792]: I0121 17:56:07.254614 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f4005d09501fc495240993d1765b6c8c94ab14abf2b800843d2f43d54ca65055"} Jan 21 17:56:07 crc kubenswrapper[4792]: I0121 17:56:07.256471 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"f408adf1e9aa41d220e4a867c0e5e26c9c8617ce182cff8f8fb6643fa38659a8"} Jan 21 17:56:07 crc kubenswrapper[4792]: I0121 17:56:07.257568 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
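The kubelet-serving certificate above expires 2026-02-24, yet each "rotation deadline is ..." line in this log shows a different time (2026-01-18 here, 2025-11-13 and 2025-11-21 below): the deadline is re-drawn at random from late in the certificate's validity window so a fleet of kubelets does not rotate all at once. A minimal sketch of that idea, assuming the deadline falls uniformly between 70% and 90% of the way through the lifetime; the exact distribution used by client-go's certificate manager may differ:

```go
package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a random point in the late portion of the
// certificate's validity window (assumed 70-90% of the lifetime here).
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	frac := 0.7 + 0.2*rand.Float64()
	return notBefore.Add(time.Duration(frac * float64(total)))
}

func main() {
	// Hypothetical one-year validity ending at the expiration seen above.
	nb := time.Date(2025, 2, 24, 5, 53, 3, 0, time.UTC)
	na := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC)
	fmt.Println(rotationDeadline(nb, na)) // a different deadline each run
}
```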
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"360f76879aac4717dc10e679de691f51869c20aa16dafa54f137263a40e1bec5"} Jan 21 17:56:07 crc kubenswrapper[4792]: W0121 17:56:07.262765 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.68:6443: connect: connection refused Jan 21 17:56:07 crc kubenswrapper[4792]: E0121 17:56:07.262892 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.68:6443: connect: connection refused" logger="UnhandledError" Jan 21 17:56:07 crc kubenswrapper[4792]: W0121 17:56:07.346678 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.68:6443: connect: connection refused Jan 21 17:56:07 crc kubenswrapper[4792]: E0121 17:56:07.346824 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.68:6443: connect: connection refused" logger="UnhandledError" Jan 21 17:56:07 crc kubenswrapper[4792]: W0121 17:56:07.564614 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.68:6443: connect: connection refused Jan 21 17:56:07 crc kubenswrapper[4792]: E0121 17:56:07.564787 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.68:6443: connect: connection refused" logger="UnhandledError" Jan 21 17:56:07 crc kubenswrapper[4792]: E0121 17:56:07.603058 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" interval="1.6s" Jan 21 17:56:07 crc kubenswrapper[4792]: W0121 17:56:07.659525 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.68:6443: connect: connection refused Jan 21 17:56:07 crc kubenswrapper[4792]: E0121 17:56:07.659629 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.68:6443: connect: connection refused" logger="UnhandledError" Jan 21 17:56:07 crc kubenswrapper[4792]: I0121 17:56:07.823280 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller 
attach/detach" Jan 21 17:56:07 crc kubenswrapper[4792]: I0121 17:56:07.826311 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:07 crc kubenswrapper[4792]: I0121 17:56:07.826356 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:07 crc kubenswrapper[4792]: I0121 17:56:07.826366 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:07 crc kubenswrapper[4792]: I0121 17:56:07.826398 4792 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 21 17:56:07 crc kubenswrapper[4792]: E0121 17:56:07.827364 4792 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.68:6443: connect: connection refused" node="crc" Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.197159 4792 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.68:6443: connect: connection refused Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.199133 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 06:51:16.18849971 +0000 UTC Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.261489 4792 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="a1b47455f06c5a6fb55c0ba54c00c3113575c9aceec13f75c0b763b6a8677f06" exitCode=0 Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.261554 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"a1b47455f06c5a6fb55c0ba54c00c3113575c9aceec13f75c0b763b6a8677f06"} Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.261647 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.262703 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.262732 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.262742 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.263604 4792 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5" exitCode=0 Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.263662 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.263673 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5"} Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.264339 4792 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.264365 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.264373 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.265556 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.266140 4792 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="c7a7c52189641ff112ef87110b27b8c28706567e55e5f2089d42c95fec647b2a" exitCode=0
Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.266216 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"c7a7c52189641ff112ef87110b27b8c28706567e55e5f2089d42c95fec647b2a"}
Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.266283 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.268101 4792 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.268194 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.268231 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.268241 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.268841 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.268911 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.268933 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:08 crc kubenswrapper[4792]: E0121 17:56:08.269350 4792 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.129.56.68:6443: connect: connection refused" logger="UnhandledError"
Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.274699 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710"}
Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.274781 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea"}
Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.274798 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66"}
Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.276298 4792 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742" exitCode=0
Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.276365 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742"}
Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.276542 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.282608 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.282652 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:08 crc kubenswrapper[4792]: I0121 17:56:08.282666 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.197630 4792 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.68:6443: connect: connection refused
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.199639 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 20:20:40.438380416 +0000 UTC
Jan 21 17:56:09 crc kubenswrapper[4792]: E0121 17:56:09.204577 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" interval="3.2s"
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.280989 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"98f09db41085faefd387a464b2e702916f6ee1850251810834bffc896ec4479e"}
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.281032 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"41cdac15a17aeca1efb61e6fb234ab59861bb3b692cc028f5d305c23eaad7366"}
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.281041 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"a73d7382542136d91922475711138b5a59633f510c9beadd57bba84cf27db54e"}
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.281078 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.282017 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.282053 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.282067 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.284518 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462"}
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.284548 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8"}
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.284559 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc"}
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.284570 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5"}
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.289075 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"9d9f88d458e735cbdddcc588fc99a287f622bc6910b99568b03ede890ba58de2"}
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.289126 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.293035 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.293103 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.293115 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.301677 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90"}
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.301758 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.303044 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.303076 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.303084 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.305936 4792 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1" exitCode=0
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.305984 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1"}
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.306220 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.307737 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.307785 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.307794 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:09 crc kubenswrapper[4792]: W0121 17:56:09.351425 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.68:6443: connect: connection refused
Jan 21 17:56:09 crc kubenswrapper[4792]: E0121 17:56:09.351538 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.68:6443: connect: connection refused" logger="UnhandledError"
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.428349 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.430016 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.430068 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.430079 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:09 crc kubenswrapper[4792]: I0121 17:56:09.430110 4792 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 21 17:56:09 crc kubenswrapper[4792]: E0121 17:56:09.430682 4792 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.68:6443: connect: connection refused" node="crc"
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.200308 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 12:29:44.163466212 +0000 UTC
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.312194 4792 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed" exitCode=0
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.312278 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.312289 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed"}
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.313274 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.313319 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.313338 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.315654 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63"}
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.315712 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.315752 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.315779 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.315759 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.315760 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.317090 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.317103 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.317112 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.317118 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.317124 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.317127 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.317158 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.317167 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.317175 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.317192 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.317214 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:10 crc kubenswrapper[4792]: I0121 17:56:10.317224 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:11 crc kubenswrapper[4792]: I0121 17:56:11.201068 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 18:25:37.618500064 +0000 UTC
Jan 21 17:56:11 crc kubenswrapper[4792]: I0121 17:56:11.322190 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Jan 21 17:56:11 crc kubenswrapper[4792]: I0121 17:56:11.322234 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:11 crc kubenswrapper[4792]: I0121 17:56:11.322774 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f"}
Jan 21 17:56:11 crc kubenswrapper[4792]: I0121 17:56:11.322798 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:11 crc kubenswrapper[4792]: I0121 17:56:11.322807 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d"}
Jan 21 17:56:11 crc kubenswrapper[4792]: I0121 17:56:11.322820 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050"}
Jan 21 17:56:11 crc kubenswrapper[4792]: I0121 17:56:11.322830 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1"}
Jan 21 17:56:11 crc kubenswrapper[4792]: I0121 17:56:11.322840 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78"}
Jan 21 17:56:11 crc kubenswrapper[4792]: I0121 17:56:11.323435 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:11 crc kubenswrapper[4792]: I0121 17:56:11.323457 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:11 crc kubenswrapper[4792]: I0121 17:56:11.323466 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:11 crc kubenswrapper[4792]: I0121 17:56:11.323970 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:11 crc kubenswrapper[4792]: I0121 17:56:11.323987 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:11 crc kubenswrapper[4792]: I0121 17:56:11.323997 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:12 crc kubenswrapper[4792]: I0121 17:56:12.201225 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 00:05:00.845465306 +0000 UTC
Jan 21 17:56:12 crc kubenswrapper[4792]: I0121 17:56:12.324101 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:12 crc kubenswrapper[4792]: I0121 17:56:12.324760 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:12 crc kubenswrapper[4792]: I0121 17:56:12.324785 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:12 crc kubenswrapper[4792]: I0121 17:56:12.324793 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:12 crc kubenswrapper[4792]: I0121 17:56:12.341992 4792 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Jan 21 17:56:12 crc kubenswrapper[4792]: I0121 17:56:12.458417 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Jan 21 17:56:12 crc kubenswrapper[4792]: I0121 17:56:12.458882 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:12 crc kubenswrapper[4792]: I0121 17:56:12.459900 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:12 crc kubenswrapper[4792]: I0121 17:56:12.460016 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:12 crc kubenswrapper[4792]: I0121 17:56:12.460080 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:12 crc kubenswrapper[4792]: I0121 17:56:12.609841 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Jan 21 17:56:12 crc kubenswrapper[4792]: I0121 17:56:12.631124 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:12 crc kubenswrapper[4792]: I0121 17:56:12.632359 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:12 crc kubenswrapper[4792]: I0121 17:56:12.632401 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:12 crc kubenswrapper[4792]: I0121 17:56:12.632414 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:12 crc kubenswrapper[4792]: I0121 17:56:12.632441 4792 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 21 17:56:12 crc kubenswrapper[4792]: I0121 17:56:12.817607 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:56:12 crc kubenswrapper[4792]: I0121 17:56:12.817789 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Jan 21 17:56:12 crc kubenswrapper[4792]: I0121 17:56:12.817833 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:12 crc kubenswrapper[4792]: I0121 17:56:12.819030 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:12 crc kubenswrapper[4792]: I0121 17:56:12.819062 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:12 crc kubenswrapper[4792]: I0121 17:56:12.819071 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:13 crc kubenswrapper[4792]: I0121 17:56:13.203156 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 07:05:23.813458191 +0000 UTC
Jan 21 17:56:13 crc kubenswrapper[4792]: I0121 17:56:13.326925 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:13 crc kubenswrapper[4792]: I0121 17:56:13.327796 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:13 crc kubenswrapper[4792]: I0121 17:56:13.327864 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:13 crc kubenswrapper[4792]: I0121 17:56:13.327877 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:13 crc kubenswrapper[4792]: I0121 17:56:13.500125 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:56:13 crc kubenswrapper[4792]: I0121 17:56:13.500245 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Jan 21 17:56:13 crc kubenswrapper[4792]: I0121 17:56:13.500279 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:13 crc kubenswrapper[4792]: I0121 17:56:13.501210 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:13 crc kubenswrapper[4792]: I0121 17:56:13.501249 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:13 crc kubenswrapper[4792]: I0121 17:56:13.501260 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:14 crc kubenswrapper[4792]: I0121 17:56:14.203606 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 11:16:56.829881124 +0000 UTC
Jan 21 17:56:15 crc kubenswrapper[4792]: I0121 17:56:15.204179 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 22:21:13.966555149 +0000 UTC
Jan 21 17:56:15 crc kubenswrapper[4792]: I0121 17:56:15.400126 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:56:15 crc kubenswrapper[4792]: I0121 17:56:15.400304 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:15 crc kubenswrapper[4792]: I0121 17:56:15.401238 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:15 crc kubenswrapper[4792]: I0121 17:56:15.401266 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:15 crc kubenswrapper[4792]: I0121 17:56:15.401274 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:16 crc kubenswrapper[4792]: I0121 17:56:16.204906 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 23:44:50.931597285 +0000 UTC
Jan 21 17:56:16 crc kubenswrapper[4792]: I0121 17:56:16.278468 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 21 17:56:16 crc kubenswrapper[4792]: I0121 17:56:16.278777 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:16 crc kubenswrapper[4792]: I0121 17:56:16.280316 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:16 crc kubenswrapper[4792]: I0121 17:56:16.280404 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:16 crc kubenswrapper[4792]: I0121 17:56:16.280436 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:16 crc kubenswrapper[4792]: E0121 17:56:16.325268 4792 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Jan 21 17:56:16 crc kubenswrapper[4792]: I0121 17:56:16.890290 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Jan 21 17:56:16 crc kubenswrapper[4792]: I0121 17:56:16.890503 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:16 crc kubenswrapper[4792]: I0121 17:56:16.891553 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:16 crc kubenswrapper[4792]: I0121 17:56:16.891586 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:16 crc kubenswrapper[4792]: I0121 17:56:16.891598 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:17 crc kubenswrapper[4792]: I0121 17:56:17.179910 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 21 17:56:17 crc kubenswrapper[4792]: I0121 17:56:17.180062 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:17 crc kubenswrapper[4792]: I0121 17:56:17.181193 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:17 crc kubenswrapper[4792]: I0121 17:56:17.181240 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:17 crc kubenswrapper[4792]: I0121 17:56:17.181249 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:17 crc kubenswrapper[4792]: I0121 17:56:17.205675 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 18:44:26.451387133 +0000 UTC
Jan 21 17:56:17 crc kubenswrapper[4792]: I0121 17:56:17.220970 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 21 17:56:17 crc kubenswrapper[4792]: I0121 17:56:17.334780 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:17 crc kubenswrapper[4792]: I0121 17:56:17.335634 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:17 crc kubenswrapper[4792]: I0121 17:56:17.335654 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:17 crc kubenswrapper[4792]: I0121 17:56:17.335662 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:18 crc kubenswrapper[4792]: I0121 17:56:18.048333 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 21 17:56:18 crc kubenswrapper[4792]: I0121 17:56:18.053753 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 21 17:56:18 crc kubenswrapper[4792]: I0121 17:56:18.206459 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 03:38:16.211983894 +0000 UTC
Jan 21 17:56:18 crc kubenswrapper[4792]: I0121 17:56:18.337917 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:18 crc kubenswrapper[4792]: I0121 17:56:18.338961 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:18 crc kubenswrapper[4792]: I0121 17:56:18.339001 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:18 crc kubenswrapper[4792]: I0121 17:56:18.339010 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:18 crc kubenswrapper[4792]: I0121 17:56:18.341663 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 21 17:56:19 crc kubenswrapper[4792]: I0121 17:56:19.207286 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 23:43:30.717879221 +0000 UTC
Jan 21 17:56:19 crc kubenswrapper[4792]: I0121 17:56:19.339967 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:19 crc kubenswrapper[4792]: I0121 17:56:19.340919 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:19 crc kubenswrapper[4792]: I0121 17:56:19.340967 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:19 crc kubenswrapper[4792]: I0121 17:56:19.340978 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:19 crc kubenswrapper[4792]: W0121 17:56:19.666936 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout
Jan 21 17:56:19 crc kubenswrapper[4792]: I0121 17:56:19.667223 4792 trace.go:236] Trace[1348991081]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (21-Jan-2026 17:56:09.665) (total time: 10001ms):
Jan 21 17:56:19 crc kubenswrapper[4792]: Trace[1348991081]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10000ms (17:56:19.666)
Jan 21 17:56:19 crc kubenswrapper[4792]: Trace[1348991081]: [10.001269937s] [10.001269937s] END
Jan 21 17:56:19 crc kubenswrapper[4792]: E0121 17:56:19.667367 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Jan 21 17:56:19 crc kubenswrapper[4792]: W0121 17:56:19.683982 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout
Jan 21 17:56:19 crc kubenswrapper[4792]: I0121 17:56:19.684457 4792 trace.go:236] Trace[357067259]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (21-Jan-2026 17:56:09.683) (total time: 10000ms):
Jan 21 17:56:19 crc kubenswrapper[4792]: Trace[357067259]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10000ms (17:56:19.683)
Jan 21 17:56:19 crc kubenswrapper[4792]: Trace[357067259]: [10.000991101s] [10.000991101s] END
Jan 21 17:56:19 crc kubenswrapper[4792]: E0121 17:56:19.684497 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Jan 21 17:56:20 crc kubenswrapper[4792]: I0121 17:56:20.179921 4792 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Jan 21 17:56:20 crc kubenswrapper[4792]: I0121 17:56:20.180243 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Jan 21 17:56:20 crc kubenswrapper[4792]: I0121 17:56:20.179939 4792 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Jan 21 17:56:20 crc kubenswrapper[4792]: I0121 17:56:20.180424 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Jan 21 17:56:20 crc kubenswrapper[4792]: I0121 17:56:20.185830 4792 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Jan 21 17:56:20 crc kubenswrapper[4792]: I0121 17:56:20.186120 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Jan 21 17:56:20 crc kubenswrapper[4792]: I0121 17:56:20.207577 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 21:24:32.043511527 +0000 UTC
Jan 21 17:56:20 crc kubenswrapper[4792]: I0121 17:56:20.343033 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:20 crc kubenswrapper[4792]: I0121 17:56:20.343972 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:20 crc kubenswrapper[4792]: I0121 17:56:20.344066 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:20 crc kubenswrapper[4792]: I0121 17:56:20.344163 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:21 crc kubenswrapper[4792]: I0121 17:56:21.207903 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 17:47:07.646119919 +0000 UTC
Jan 21 17:56:22 crc kubenswrapper[4792]: I0121 17:56:22.208676 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 13:04:43.202928679 +0000 UTC
Jan 21 17:56:23 crc kubenswrapper[4792]: I0121 17:56:23.209154 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 13:16:33.659276813 +0000 UTC
Jan 21 17:56:23 crc kubenswrapper[4792]: I0121 17:56:23.510562 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:56:23 crc kubenswrapper[4792]: I0121 17:56:23.510727 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:56:23 crc kubenswrapper[4792]: I0121 17:56:23.511662 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:23 crc kubenswrapper[4792]: I0121 17:56:23.511704 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:23 crc kubenswrapper[4792]: I0121 17:56:23.511714 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:23 crc kubenswrapper[4792]: I0121 17:56:23.516515 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:56:23 crc kubenswrapper[4792]: I0121 17:56:23.752620 4792 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.201353 4792 apiserver.go:52] "Watching apiserver"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.207482 4792 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.208011 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"]
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.208488 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.208513 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.208581 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.208606 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 21 17:56:24 crc kubenswrapper[4792]: E0121 17:56:24.208681 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 21 17:56:24 crc kubenswrapper[4792]: E0121 17:56:24.208746 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.208907 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.208913 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:56:24 crc kubenswrapper[4792]: E0121 17:56:24.209083 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.209968 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 20:24:15.259661428 +0000 UTC
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.212621 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.212892 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.213070 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.213188 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.213579 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.213640 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.214779 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.214880 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.215503 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.243959 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.259085 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.268656 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.282001 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.292873 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.299841 4792 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.304351 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.330278 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.348645 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.361985 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.367828 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.373176 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 21 17:56:24 crc kubenswrapper[4792]: I0121 17:56:24.419754 4792 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 21 17:56:25 crc kubenswrapper[4792]: E0121 17:56:25.183679 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.186008 4792 trace.go:236] Trace[437942513]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (21-Jan-2026 17:56:14.439) (total time: 10746ms): Jan 21 17:56:25 crc kubenswrapper[4792]: Trace[437942513]: ---"Objects listed" error: 10746ms (17:56:25.185) Jan 21 17:56:25 crc kubenswrapper[4792]: Trace[437942513]: [10.746464901s] [10.746464901s] END Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.186276 4792 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.186590 4792 trace.go:236] Trace[2082671193]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (21-Jan-2026 17:56:10.696) (total time: 14489ms): Jan 21 17:56:25 crc kubenswrapper[4792]: Trace[2082671193]: ---"Objects listed" error: 14489ms (17:56:25.186) Jan 21 17:56:25 crc kubenswrapper[4792]: Trace[2082671193]: [14.489974325s] [14.489974325s] END Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.186727 4792 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.186994 4792 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 21 17:56:25 crc kubenswrapper[4792]: E0121 17:56:25.187667 4792 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.188000 4792 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.209425 4792 csr.go:261] certificate signing request csr-6jrs6 is approved, waiting to be issued Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.210344 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, 
rotation deadline is 2026-01-08 01:30:22.749725609 +0000 UTC Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.211408 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.222740 4792 csr.go:257] certificate signing request csr-6jrs6 is issued Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.225105 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.239936 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.245824 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:56:25 crc kubenswrapper[4792]: E0121 17:56:25.246042 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.250978 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.263946 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":
{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\
\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.277983 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.289445 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.289511 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.289543 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.289568 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.289597 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.289626 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.289648 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.289670 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.289695 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.289718 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.289741 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.289766 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.289822 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.289866 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.289891 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.289922 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.289945 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.289969 4792 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.289991 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290015 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290040 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290066 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290109 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290133 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290347 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290370 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290394 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 21 
17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290375 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290419 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290530 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290564 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290591 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290615 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290640 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290665 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290695 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290725 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290731 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290804 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290832 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290884 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290913 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290941 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290961 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.290966 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291011 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291045 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291072 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291095 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291118 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291143 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291171 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291185 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291198 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291224 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291250 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291276 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291300 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291279 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291326 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291390 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291458 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291496 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291527 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291531 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291556 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291590 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291601 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291619 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291654 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291682 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291711 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291733 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291744 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291775 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291803 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291830 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291882 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291935 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.291968 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292010 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292014 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292041 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292070 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292100 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292134 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292171 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292200 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292266 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292292 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292323 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292350 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod 
\"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292379 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292408 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292434 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292463 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292523 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292552 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292578 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292605 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292618 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292632 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292659 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292685 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292712 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292736 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292766 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292790 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292834 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292848 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292934 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.292995 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293022 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293057 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293087 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293117 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293147 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293177 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293204 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod 
\"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293233 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293280 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293307 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293336 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293363 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293392 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293420 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293435 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293448 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293557 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293594 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293634 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293664 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293688 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293705 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293723 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293730 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293741 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293808 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293835 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293877 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.293903 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.294477 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.294843 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.294912 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.295452 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.295367 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.295569 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.295673 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.295713 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.295747 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.295813 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.295887 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.295921 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.295972 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.296024 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.296077 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.296268 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.296314 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.296541 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.296587 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.296618 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.296654 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.296729 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.296760 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.296846 4792 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.296950 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.297016 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.297206 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.297245 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.297301 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.297742 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.297817 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.297945 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.297987 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:56:25 crc 
kubenswrapper[4792]: I0121 17:56:25.298032 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.298064 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.298203 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.317952 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.318106 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.318142 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.318183 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.318218 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.318237 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.318258 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: 
\"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.318290 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.318410 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.321322 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.321381 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.321414 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.321444 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.321472 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.321495 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.321531 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.321558 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: 
\"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.321587 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.321621 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.321647 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.321673 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.321696 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.321719 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.321749 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.321769 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.321786 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.321825 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: 
\"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.322016 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.322041 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.322062 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.322082 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.295876 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.296003 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.296095 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.296520 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.330931 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.296551 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.296823 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.296901 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.296916 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.297342 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.297890 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.298340 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.305360 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.306242 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.307362 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.308015 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.308112 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.312238 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.317055 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.318038 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.318991 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.319036 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.319730 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.320322 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.301369 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.326318 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.326361 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.326520 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.326576 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.330450 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.331048 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.331100 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.331182 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.331319 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.331346 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.331353 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.331344 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.331589 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.331598 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.331637 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.331811 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.332124 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.332175 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.332183 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.332261 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.317824 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.332600 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.332602 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.332679 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.332890 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.332915 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.333189 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.333269 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.333349 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.333363 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.333453 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.333388 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.333793 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.333828 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.334163 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.334416 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.334677 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.334687 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.334893 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.334986 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.334993 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.335256 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.335902 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.335914 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.336363 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.336617 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.337055 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.337092 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.340966 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.343669 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.344075 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.344210 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.334569 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.345643 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.356735 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.357099 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.357163 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.357391 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.357534 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.357632 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.357671 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.357789 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.333712 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.357878 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.357910 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.357931 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.357952 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.357973 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.358138 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.358237 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.358390 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.358446 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.358586 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.358689 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.358820 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.358925 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.359126 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.330828 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.360202 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.330473 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.360425 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.360695 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.360804 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.360798 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.360893 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.360892 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.360940 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.361089 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.361194 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.361477 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.361984 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: E0121 17:56:25.363232 4792 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.363303 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.363396 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.363652 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.363673 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.360090 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.364122 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.364384 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.364666 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.364747 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.365160 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.365284 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.365358 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.365388 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.365474 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.365567 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.365632 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.365696 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.365734 4792 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.365795 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.365877 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.365934 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.366018 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.363943 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.366232 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.366452 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.366464 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.366541 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.363586 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.371441 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.373278 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.374341 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.374645 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.375439 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.375546 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.375717 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.376440 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.376973 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.378994 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.379389 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.379400 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.379698 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.379972 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.380159 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.380363 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.380410 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.380612 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.380737 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.380791 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.380870 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.381031 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.381177 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.381233 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.381241 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.381546 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.381869 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.382241 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.382509 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.382986 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.383317 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.383944 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.384551 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: E0121 17:56:25.384656 4792 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:56:25 crc kubenswrapper[4792]: E0121 17:56:25.384781 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:25.884751433 +0000 UTC m=+19.866714609 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:56:25 crc kubenswrapper[4792]: E0121 17:56:25.384902 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:25.884892258 +0000 UTC m=+19.866855444 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.385660 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 21 17:56:25 crc kubenswrapper[4792]: E0121 17:56:25.385722 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:56:25.885677309 +0000 UTC m=+19.867640495 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386034 4792 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386162 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386199 4792 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386216 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386235 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386251 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386266 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386280 4792 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386292 4792 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386304 4792 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386317 4792 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386328 4792 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386341 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 
17:56:25.386352 4792 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386364 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386409 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386424 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386436 4792 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386448 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386459 4792 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386474 4792 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386486 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386500 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386517 4792 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386531 4792 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386544 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc 
kubenswrapper[4792]: I0121 17:56:25.386558 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386569 4792 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386582 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386596 4792 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386610 4792 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386622 4792 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386634 4792 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386646 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386659 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386670 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386686 4792 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386700 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386711 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386724 4792 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386736 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386747 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386759 4792 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386770 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386782 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386793 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.386875 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.387113 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.387570 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.387586 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.388720 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.389342 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.389412 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.389880 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.390656 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.390975 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.391017 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). 
InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.391508 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.392062 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.395891 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.388323 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.399131 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.399203 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.400164 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.408378 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.408446 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.409188 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 21 17:56:25 crc kubenswrapper[4792]: E0121 17:56:25.412446 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:56:25 crc kubenswrapper[4792]: E0121 17:56:25.412503 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:56:25 crc kubenswrapper[4792]: E0121 17:56:25.412525 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.412536 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: E0121 17:56:25.412621 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:25.912591472 +0000 UTC m=+19.894554858 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.413055 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.417235 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.421347 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.425088 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: E0121 17:56:25.431040 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:56:25 crc kubenswrapper[4792]: E0121 17:56:25.431081 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:56:25 crc kubenswrapper[4792]: E0121 17:56:25.431093 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:25 crc kubenswrapper[4792]: E0121 17:56:25.431158 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:25.931137935 +0000 UTC m=+19.913101111 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.435752 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.438183 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.489803 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.490105 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.491010 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.491419 4792 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.493183 4792 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.493276 4792 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.493364 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.493521 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.493613 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.493693 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.493768 4792 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: 
\"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.493831 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.493922 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.493998 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494097 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494173 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494247 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.491426 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494309 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494409 4792 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494433 4792 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494448 4792 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494464 4792 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc 
kubenswrapper[4792]: I0121 17:56:25.494479 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494494 4792 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494519 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494532 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494546 4792 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494561 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494574 4792 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494590 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494605 4792 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494620 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494633 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494649 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494663 4792 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: 
I0121 17:56:25.494679 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494693 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494708 4792 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494722 4792 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494735 4792 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494748 4792 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494761 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494774 4792 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494787 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494799 4792 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494811 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494824 4792 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494836 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 
17:56:25.494868 4792 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494881 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494896 4792 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494909 4792 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494921 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494934 4792 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494946 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494958 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494970 4792 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494981 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.494998 4792 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495022 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495036 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495049 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495065 4792 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495079 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495092 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495105 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495119 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495151 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495165 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495180 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495193 4792 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495206 4792 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495224 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495238 4792 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495251 4792 reconciler_common.go:293] "Volume detached for 
volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495264 4792 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495277 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495290 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495303 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495316 4792 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495328 4792 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495341 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495353 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495367 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495382 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495394 4792 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495405 4792 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495417 4792 reconciler_common.go:293] "Volume detached for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495431 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495445 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495457 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495468 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495481 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495496 4792 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495507 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495518 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495531 4792 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495544 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495556 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495569 4792 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495583 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: 
\"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495595 4792 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495608 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495621 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495634 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495649 4792 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495665 4792 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495679 4792 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495694 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495710 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495725 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495741 4792 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495755 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495769 4792 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" 
(UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495782 4792 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.495799 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.496575 4792 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.496599 4792 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.496612 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.496664 4792 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.496683 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.496696 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.496745 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.496837 4792 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.496875 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.496888 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.496898 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.496909 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.496920 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.496933 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.496945 4792 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.496955 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.496967 4792 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.496980 4792 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.496992 4792 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.497011 4792 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.497022 4792 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.497035 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.497047 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.497072 4792 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Jan 
21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.497831 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.497893 4792 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.497907 4792 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.498049 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.498088 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.498134 4792 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.498147 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.498165 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.498201 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.498216 4792 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.498229 4792 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.498241 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.723381 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.734583 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 21 17:56:25 crc kubenswrapper[4792]: W0121 17:56:25.735200 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-d1ef5f5cd8b4d0327e46bf2876575d5a8e4655de248aac942f3e0bcf869fd434 WatchSource:0}: Error finding container d1ef5f5cd8b4d0327e46bf2876575d5a8e4655de248aac942f3e0bcf869fd434: Status 404 returned error can't find the container with id d1ef5f5cd8b4d0327e46bf2876575d5a8e4655de248aac942f3e0bcf869fd434 Jan 21 17:56:25 crc kubenswrapper[4792]: W0121 17:56:25.753906 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-1959b6bd5eaed873326be4cc9c156f403fff0f9eba90a1f9082818066b2992f8 WatchSource:0}: Error finding container 1959b6bd5eaed873326be4cc9c156f403fff0f9eba90a1f9082818066b2992f8: Status 404 returned error can't find the container with id 1959b6bd5eaed873326be4cc9c156f403fff0f9eba90a1f9082818066b2992f8 Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.901698 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.901807 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:25 crc kubenswrapper[4792]: I0121 17:56:25.901867 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:25 crc kubenswrapper[4792]: E0121 17:56:25.902009 4792 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:56:25 crc kubenswrapper[4792]: E0121 17:56:25.902090 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:26.902069442 +0000 UTC m=+20.884032628 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:56:25 crc kubenswrapper[4792]: E0121 17:56:25.902174 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:56:26.902161714 +0000 UTC m=+20.884124900 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:56:25 crc kubenswrapper[4792]: E0121 17:56:25.902269 4792 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:56:25 crc kubenswrapper[4792]: E0121 17:56:25.902330 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:26.902323278 +0000 UTC m=+20.884286464 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.002667 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.002729 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:56:26 crc kubenswrapper[4792]: E0121 17:56:26.002944 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:56:26 crc kubenswrapper[4792]: E0121 17:56:26.002968 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:56:26 crc kubenswrapper[4792]: E0121 17:56:26.002985 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:26 crc kubenswrapper[4792]: E0121 17:56:26.003060 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:27.00303923 +0000 UTC m=+20.985002416 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:26 crc kubenswrapper[4792]: E0121 17:56:26.003091 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:56:26 crc kubenswrapper[4792]: E0121 17:56:26.003129 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:56:26 crc kubenswrapper[4792]: E0121 17:56:26.003142 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:26 crc kubenswrapper[4792]: E0121 17:56:26.003236 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:27.003212315 +0000 UTC m=+20.985175501 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.131027 4792 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Jan 21 17:56:26 crc kubenswrapper[4792]: E0121 17:56:26.131387 4792 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/events\": read tcp 38.129.56.68:54268->38.129.56.68:6443: use of closed network connection" event="&Event{ObjectMeta:{kube-rbac-proxy-crio-crc.188cd0a675c05355 openshift-machine-config-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:kube-rbac-proxy-crio-crc,UID:d1b160f5dda77d281dd8e69ec8d817f9,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-21 17:56:07.008449365 +0000 UTC m=+0.990412571,LastTimestamp:2026-01-21 17:56:07.008449365 +0000 UTC m=+0.990412571,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 21 17:56:26 crc kubenswrapper[4792]: W0121 17:56:26.131523 4792 reflector.go:484] 
k8s.io/client-go/informers/factory.go:160: watch of *v1.CSIDriver ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received Jan 21 17:56:26 crc kubenswrapper[4792]: W0121 17:56:26.131542 4792 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.RuntimeClass ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.211752 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 17:55:21.29980069 +0000 UTC Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.225091 4792 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-21 17:51:25 +0000 UTC, rotation deadline is 2026-12-10 23:30:24.504342217 +0000 UTC Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.225167 4792 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7757h33m58.279178721s for next certificate rotation Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.246111 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.246168 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:56:26 crc kubenswrapper[4792]: E0121 17:56:26.246270 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:56:26 crc kubenswrapper[4792]: E0121 17:56:26.246392 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.258087 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.258614 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.259824 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.260501 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.261506 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.262043 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.262587 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.263500 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.264123 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.264413 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.265023 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.265516 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.266588 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.267164 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.267749 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.268703 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.269303 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.270363 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.271299 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.271868 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.273387 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.273818 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.274763 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.275203 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.276332 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.276737 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.277624 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" 
path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.278794 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.279290 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.280289 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.280725 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.281592 4792 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.281690 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.283391 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.284368 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.284894 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.286913 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.287700 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.288046 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.288865 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.289653 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.290810 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.291570 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.292772 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.293643 4792 kubelet_volumes.go:163] "Cleaned up orphaned 
pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.294904 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.295495 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.296641 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.297551 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.299544 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.300616 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.302101 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.303197 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.304376 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.305779 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.306840 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.309198 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.323316 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.342011 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.362334 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.384583 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.393577 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307"} Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.393696 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"d1ef5f5cd8b4d0327e46bf2876575d5a8e4655de248aac942f3e0bcf869fd434"} Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.396140 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e"} Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.396170 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459"} Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.396180 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"5f6de6ad435c222f3f5bdfeaf80a0c7658c57e70e151b1146b81a4af5726196e"} Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.398028 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"1959b6bd5eaed873326be4cc9c156f403fff0f9eba90a1f9082818066b2992f8"} Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.416968 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.452139 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.481715 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:26Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.498106 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:26Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.516601 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.535820 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.553422 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.569610 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.584286 4792 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.601764 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.616509 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:26Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.629794 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:26Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.645137 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.667136 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"n
ame\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.911900 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.912051 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.912082 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:26 crc kubenswrapper[4792]: E0121 17:56:26.912210 4792 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:56:26 crc kubenswrapper[4792]: E0121 17:56:26.912292 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:28.912268972 +0000 UTC m=+22.894232158 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:56:26 crc kubenswrapper[4792]: E0121 17:56:26.912377 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:56:28.912365525 +0000 UTC m=+22.894328711 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:56:26 crc kubenswrapper[4792]: E0121 17:56:26.912476 4792 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:56:26 crc kubenswrapper[4792]: E0121 17:56:26.912516 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:28.91250699 +0000 UTC m=+22.894470176 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.942602 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.966378 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Jan 21 17:56:26 crc kubenswrapper[4792]: I0121 17:56:26.975299 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.009878 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.013263 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod 
\"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.013319 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:56:27 crc kubenswrapper[4792]: E0121 17:56:27.013520 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:56:27 crc kubenswrapper[4792]: E0121 17:56:27.013548 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:56:27 crc kubenswrapper[4792]: E0121 17:56:27.013566 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:27 crc kubenswrapper[4792]: E0121 17:56:27.013638 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:29.013614162 +0000 UTC m=+22.995577348 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:27 crc kubenswrapper[4792]: E0121 17:56:27.013722 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:56:27 crc kubenswrapper[4792]: E0121 17:56:27.013738 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:56:27 crc kubenswrapper[4792]: E0121 17:56:27.013748 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:27 crc kubenswrapper[4792]: E0121 17:56:27.013779 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:29.013769736 +0000 UTC m=+22.995732922 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.036429 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.063945 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.100940 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regenera
tion-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.127909 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.183287 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.191690 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.192008 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d77
3257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.206722 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.211987 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 10:17:12.250590522 +0000 UTC Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.229114 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e491
17b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.246191 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:56:27 crc kubenswrapper[4792]: E0121 17:56:27.246390 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.259139 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.261379 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\
"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.275629 4792 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.290071 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.304518 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.318282 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.337625 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.355254 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-jbz42"] Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.355712 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-jbz42" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.357671 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.357820 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.358875 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.359769 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.384000 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.402346 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.417079 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/2dc141da-a3db-4ef1-8e59-d0e1d5dee765-hosts-file\") pod 
\"node-resolver-jbz42\" (UID: \"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\") " pod="openshift-dns/node-resolver-jbz42" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.417148 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7zdm\" (UniqueName: \"kubernetes.io/projected/2dc141da-a3db-4ef1-8e59-d0e1d5dee765-kube-api-access-c7zdm\") pod \"node-resolver-jbz42\" (UID: \"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\") " pod="openshift-dns/node-resolver-jbz42" Jan 21 17:56:27 crc kubenswrapper[4792]: E0121 17:56:27.421527 4792 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"etcd-crc\" already exists" pod="openshift-etcd/etcd-crc" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.422140 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.437654 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.466013 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.479827 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.490783 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.504272 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.517126 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.517716 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7zdm\" (UniqueName: \"kubernetes.io/projected/2dc141da-a3db-4ef1-8e59-d0e1d5dee765-kube-api-access-c7zdm\") pod \"node-resolver-jbz42\" (UID: \"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\") " pod="openshift-dns/node-resolver-jbz42" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.517807 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/2dc141da-a3db-4ef1-8e59-d0e1d5dee765-hosts-file\") pod \"node-resolver-jbz42\" (UID: \"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\") " pod="openshift-dns/node-resolver-jbz42" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.517932 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/2dc141da-a3db-4ef1-8e59-d0e1d5dee765-hosts-file\") pod \"node-resolver-jbz42\" (UID: \"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\") " pod="openshift-dns/node-resolver-jbz42" Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.529249 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.536772 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7zdm\" (UniqueName: \"kubernetes.io/projected/2dc141da-a3db-4ef1-8e59-d0e1d5dee765-kube-api-access-c7zdm\") pod \"node-resolver-jbz42\" (UID: \"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\") " pod="openshift-dns/node-resolver-jbz42"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.667525 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-jbz42"
Jan 21 17:56:27 crc kubenswrapper[4792]: W0121 17:56:27.683425 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2dc141da_a3db_4ef1_8e59_d0e1d5dee765.slice/crio-38dfde582e277c788c5c90245d67543f66e056388803b1be1dd8ea405f8b9039 WatchSource:0}: Error finding container 38dfde582e277c788c5c90245d67543f66e056388803b1be1dd8ea405f8b9039: Status 404 returned error can't find the container with id 38dfde582e277c788c5c90245d67543f66e056388803b1be1dd8ea405f8b9039
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.738617 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-m5d6x"]
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.739560 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-tvdgr"]
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.739805 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.740209 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.741961 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-6lc6z"]
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.742683 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-6lc6z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.743698 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.743816 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.743924 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.743941 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.744018 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.744282 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.744418 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.746458 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.746819 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.747014 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.747057 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.747238 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.770779 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.816206 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.821167 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-etc-kubernetes\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.821235 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/759f2e21-e44e-4049-b262-cb49448e22ab-rootfs\") pod \"machine-config-daemon-m5d6x\" (UID: \"759f2e21-e44e-4049-b262-cb49448e22ab\") " pod="openshift-machine-config-operator/machine-config-daemon-m5d6x"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.821329 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-cni-binary-copy\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.821410 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-multus-socket-dir-parent\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.821474 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-host-run-netns\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.821526 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-system-cni-dir\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.821562 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-multus-cni-dir\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.821614 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-host-run-k8s-cni-cncf-io\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.821649 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d8728e15-00c6-4fa7-a79a-cee551b64c18-os-release\") pod \"multus-additional-cni-plugins-6lc6z\" (UID: \"d8728e15-00c6-4fa7-a79a-cee551b64c18\") " pod="openshift-multus/multus-additional-cni-plugins-6lc6z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.821678 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-multus-daemon-config\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.821705 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-host-var-lib-cni-bin\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.821733 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-host-run-multus-certs\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.821763 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d8728e15-00c6-4fa7-a79a-cee551b64c18-cnibin\") pod \"multus-additional-cni-plugins-6lc6z\" (UID: \"d8728e15-00c6-4fa7-a79a-cee551b64c18\") " pod="openshift-multus/multus-additional-cni-plugins-6lc6z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.821790 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d8728e15-00c6-4fa7-a79a-cee551b64c18-cni-binary-copy\") pod \"multus-additional-cni-plugins-6lc6z\" (UID: \"d8728e15-00c6-4fa7-a79a-cee551b64c18\") " pod="openshift-multus/multus-additional-cni-plugins-6lc6z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.821823 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbmv4\" (UniqueName: \"kubernetes.io/projected/d8728e15-00c6-4fa7-a79a-cee551b64c18-kube-api-access-fbmv4\") pod \"multus-additional-cni-plugins-6lc6z\" (UID: \"d8728e15-00c6-4fa7-a79a-cee551b64c18\") " pod="openshift-multus/multus-additional-cni-plugins-6lc6z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.821880 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-host-var-lib-kubelet\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.821934 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/759f2e21-e44e-4049-b262-cb49448e22ab-mcd-auth-proxy-config\") pod \"machine-config-daemon-m5d6x\" (UID: \"759f2e21-e44e-4049-b262-cb49448e22ab\") " pod="openshift-machine-config-operator/machine-config-daemon-m5d6x"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.821965 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-multus-conf-dir\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.822024 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/d8728e15-00c6-4fa7-a79a-cee551b64c18-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-6lc6z\" (UID: \"d8728e15-00c6-4fa7-a79a-cee551b64c18\") " pod="openshift-multus/multus-additional-cni-plugins-6lc6z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.822099 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-os-release\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.822138 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-hostroot\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.822162 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/759f2e21-e44e-4049-b262-cb49448e22ab-proxy-tls\") pod \"machine-config-daemon-m5d6x\" (UID: \"759f2e21-e44e-4049-b262-cb49448e22ab\") " pod="openshift-machine-config-operator/machine-config-daemon-m5d6x"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.822199 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d8728e15-00c6-4fa7-a79a-cee551b64c18-system-cni-dir\") pod \"multus-additional-cni-plugins-6lc6z\" (UID: \"d8728e15-00c6-4fa7-a79a-cee551b64c18\") " pod="openshift-multus/multus-additional-cni-plugins-6lc6z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.822263 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-cnibin\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.822284 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7p2v\" (UniqueName: \"kubernetes.io/projected/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-kube-api-access-x7p2v\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.822358 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d8728e15-00c6-4fa7-a79a-cee551b64c18-tuning-conf-dir\") pod \"multus-additional-cni-plugins-6lc6z\" (UID: \"d8728e15-00c6-4fa7-a79a-cee551b64c18\") " pod="openshift-multus/multus-additional-cni-plugins-6lc6z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.822393 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-host-var-lib-cni-multus\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.822424 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4sb6s\" (UniqueName: \"kubernetes.io/projected/759f2e21-e44e-4049-b262-cb49448e22ab-kube-api-access-4sb6s\") pod \"machine-config-daemon-m5d6x\" (UID: \"759f2e21-e44e-4049-b262-cb49448e22ab\") " pod="openshift-machine-config-operator/machine-config-daemon-m5d6x"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.886290 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.923962 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-multus-cni-dir\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924014 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-host-run-k8s-cni-cncf-io\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924040 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d8728e15-00c6-4fa7-a79a-cee551b64c18-os-release\") pod \"multus-additional-cni-plugins-6lc6z\" (UID: \"d8728e15-00c6-4fa7-a79a-cee551b64c18\") " pod="openshift-multus/multus-additional-cni-plugins-6lc6z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924065 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-multus-daemon-config\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924085 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d8728e15-00c6-4fa7-a79a-cee551b64c18-cnibin\") pod \"multus-additional-cni-plugins-6lc6z\" (UID: \"d8728e15-00c6-4fa7-a79a-cee551b64c18\") " pod="openshift-multus/multus-additional-cni-plugins-6lc6z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924101 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d8728e15-00c6-4fa7-a79a-cee551b64c18-cni-binary-copy\") pod \"multus-additional-cni-plugins-6lc6z\" (UID: \"d8728e15-00c6-4fa7-a79a-cee551b64c18\") " pod="openshift-multus/multus-additional-cni-plugins-6lc6z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924120 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-host-var-lib-cni-bin\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924139 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-host-run-multus-certs\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924157 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbmv4\" (UniqueName: \"kubernetes.io/projected/d8728e15-00c6-4fa7-a79a-cee551b64c18-kube-api-access-fbmv4\") pod \"multus-additional-cni-plugins-6lc6z\" (UID: \"d8728e15-00c6-4fa7-a79a-cee551b64c18\") " pod="openshift-multus/multus-additional-cni-plugins-6lc6z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924182 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-host-var-lib-kubelet\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924206 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/759f2e21-e44e-4049-b262-cb49448e22ab-mcd-auth-proxy-config\") pod \"machine-config-daemon-m5d6x\" (UID: \"759f2e21-e44e-4049-b262-cb49448e22ab\") " pod="openshift-machine-config-operator/machine-config-daemon-m5d6x"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924236 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-multus-conf-dir\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924255 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/d8728e15-00c6-4fa7-a79a-cee551b64c18-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-6lc6z\" (UID: \"d8728e15-00c6-4fa7-a79a-cee551b64c18\") " pod="openshift-multus/multus-additional-cni-plugins-6lc6z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924256 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-multus-cni-dir\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924284 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-os-release\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924328 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-hostroot\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924349 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/759f2e21-e44e-4049-b262-cb49448e22ab-proxy-tls\") pod \"machine-config-daemon-m5d6x\" (UID: \"759f2e21-e44e-4049-b262-cb49448e22ab\") " pod="openshift-machine-config-operator/machine-config-daemon-m5d6x"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924382 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-cnibin\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924403 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7p2v\" (UniqueName: \"kubernetes.io/projected/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-kube-api-access-x7p2v\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924420 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d8728e15-00c6-4fa7-a79a-cee551b64c18-system-cni-dir\") pod \"multus-additional-cni-plugins-6lc6z\" (UID: \"d8728e15-00c6-4fa7-a79a-cee551b64c18\") " pod="openshift-multus/multus-additional-cni-plugins-6lc6z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924438 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d8728e15-00c6-4fa7-a79a-cee551b64c18-tuning-conf-dir\") pod \"multus-additional-cni-plugins-6lc6z\" (UID: \"d8728e15-00c6-4fa7-a79a-cee551b64c18\") " pod="openshift-multus/multus-additional-cni-plugins-6lc6z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924457 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-host-var-lib-cni-multus\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924474 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4sb6s\" (UniqueName: \"kubernetes.io/projected/759f2e21-e44e-4049-b262-cb49448e22ab-kube-api-access-4sb6s\") pod \"machine-config-daemon-m5d6x\" (UID: \"759f2e21-e44e-4049-b262-cb49448e22ab\") " pod="openshift-machine-config-operator/machine-config-daemon-m5d6x"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924492 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-cni-binary-copy\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924511 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-multus-socket-dir-parent\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924528 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-host-run-netns\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924550 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-etc-kubernetes\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924566 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/759f2e21-e44e-4049-b262-cb49448e22ab-rootfs\") pod \"machine-config-daemon-m5d6x\" (UID: \"759f2e21-e44e-4049-b262-cb49448e22ab\") " pod="openshift-machine-config-operator/machine-config-daemon-m5d6x"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924583 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-system-cni-dir\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924575 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-host-run-k8s-cni-cncf-io\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924638 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-system-cni-dir\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924670 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-host-var-lib-cni-multus\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924691 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-etc-kubernetes\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924695 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-host-run-netns\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924714 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/759f2e21-e44e-4049-b262-cb49448e22ab-rootfs\") pod \"machine-config-daemon-m5d6x\" (UID: \"759f2e21-e44e-4049-b262-cb49448e22ab\") " pod="openshift-machine-config-operator/machine-config-daemon-m5d6x"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924550 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-os-release\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924738 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-hostroot\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924748 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d8728e15-00c6-4fa7-a79a-cee551b64c18-cnibin\") pod \"multus-additional-cni-plugins-6lc6z\" (UID: \"d8728e15-00c6-4fa7-a79a-cee551b64c18\") " pod="openshift-multus/multus-additional-cni-plugins-6lc6z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924831 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-host-run-multus-certs\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924925 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-cnibin\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.924992 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d8728e15-00c6-4fa7-a79a-cee551b64c18-system-cni-dir\") pod \"multus-additional-cni-plugins-6lc6z\" (UID: \"d8728e15-00c6-4fa7-a79a-cee551b64c18\") " pod="openshift-multus/multus-additional-cni-plugins-6lc6z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.925046 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d8728e15-00c6-4fa7-a79a-cee551b64c18-os-release\") pod \"multus-additional-cni-plugins-6lc6z\" (UID: \"d8728e15-00c6-4fa7-a79a-cee551b64c18\") " pod="openshift-multus/multus-additional-cni-plugins-6lc6z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.925111 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-host-var-lib-cni-bin\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.925122 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-multus-socket-dir-parent\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.925145 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-host-var-lib-kubelet\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.925175 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-multus-conf-dir\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.925603 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-multus-daemon-config\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.925890 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-cni-binary-copy\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.925926 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/d8728e15-00c6-4fa7-a79a-cee551b64c18-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-6lc6z\" (UID: \"d8728e15-00c6-4fa7-a79a-cee551b64c18\") " pod="openshift-multus/multus-additional-cni-plugins-6lc6z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.925987 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d8728e15-00c6-4fa7-a79a-cee551b64c18-cni-binary-copy\") pod \"multus-additional-cni-plugins-6lc6z\" (UID: \"d8728e15-00c6-4fa7-a79a-cee551b64c18\") " pod="openshift-multus/multus-additional-cni-plugins-6lc6z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.925996 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/759f2e21-e44e-4049-b262-cb49448e22ab-mcd-auth-proxy-config\") pod \"machine-config-daemon-m5d6x\" (UID: \"759f2e21-e44e-4049-b262-cb49448e22ab\") " pod="openshift-machine-config-operator/machine-config-daemon-m5d6x"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.931441 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/759f2e21-e44e-4049-b262-cb49448e22ab-proxy-tls\") pod \"machine-config-daemon-m5d6x\" (UID: \"759f2e21-e44e-4049-b262-cb49448e22ab\") " pod="openshift-machine-config-operator/machine-config-daemon-m5d6x"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.933553 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.951937 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7p2v\" (UniqueName: \"kubernetes.io/projected/129c7cf1-6a9e-440a-8d4e-049c0652cf6e-kube-api-access-x7p2v\") pod \"multus-tvdgr\" (UID: \"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\") " pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.977551 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.977722 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4sb6s\" (UniqueName: \"kubernetes.io/projected/759f2e21-e44e-4049-b262-cb49448e22ab-kube-api-access-4sb6s\") pod \"machine-config-daemon-m5d6x\" (UID: \"759f2e21-e44e-4049-b262-cb49448e22ab\") " pod="openshift-machine-config-operator/machine-config-daemon-m5d6x"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.977953 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbmv4\" (UniqueName: \"kubernetes.io/projected/d8728e15-00c6-4fa7-a79a-cee551b64c18-kube-api-access-fbmv4\") pod \"multus-additional-cni-plugins-6lc6z\" (UID: \"d8728e15-00c6-4fa7-a79a-cee551b64c18\") " pod="openshift-multus/multus-additional-cni-plugins-6lc6z"
Jan 21 17:56:27 crc kubenswrapper[4792]: I0121 17:56:27.996143 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:27Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.013263 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.026419 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.038301 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.053072 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-tvdgr"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.058762 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"q
uay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c68774
41ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.064252 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.076284 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\
\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.091643 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.117380 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.146418 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\
"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21
T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.154834 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-8t4xq"] Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.155978 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.158670 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.158821 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.158830 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.159264 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.159434 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.159421 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.160347 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.166973 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c98711
7ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc760
88c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.181012 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube
-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.193235 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.203420 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.212949 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 08:28:51.227381101 +0000 UTC
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.220187 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.226926 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/10549a02-b482-4bc0-a770-65dbb57f340a-ovnkube-config\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.227004 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-run-netns\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.227068 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-cni-bin\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.227097 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/10549a02-b482-4bc0-a770-65dbb57f340a-env-overrides\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.227133 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-systemd-units\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.227235 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/10549a02-b482-4bc0-a770-65dbb57f340a-ovn-node-metrics-cert\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.227365 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-run-systemd\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.227405 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-log-socket\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.227434 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-etc-openvswitch\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.227504 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-slash\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.227557 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-run-ovn-kubernetes\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.227595 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/10549a02-b482-4bc0-a770-65dbb57f340a-ovnkube-script-lib\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.227627 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnmzx\" (UniqueName: \"kubernetes.io/projected/10549a02-b482-4bc0-a770-65dbb57f340a-kube-api-access-rnmzx\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.227663 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-run-ovn\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.227686 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-node-log\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.227793 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-cni-netd\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.227911 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.227957 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-var-lib-openvswitch\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.228048 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-kubelet\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.228113 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-run-openvswitch\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.237254 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.246799 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.246990 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:56:28 crc kubenswrapper[4792]: E0121 17:56:28.247087 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 21 17:56:28 crc kubenswrapper[4792]: E0121 17:56:28.247208 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.256978 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.270946 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.285912 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.301075 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.316065 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.327460 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.328832 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-run-systemd\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.328901 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-log-socket\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.328926 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-etc-openvswitch\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.328948 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-slash\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.328968 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-run-ovn-kubernetes\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.328975 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-log-socket\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.329020 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-slash\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.329064 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-etc-openvswitch\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.328975 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-run-systemd\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.328984 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/10549a02-b482-4bc0-a770-65dbb57f340a-ovnkube-script-lib\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.329137 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-run-ovn-kubernetes\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.329259 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnmzx\" (UniqueName: \"kubernetes.io/projected/10549a02-b482-4bc0-a770-65dbb57f340a-kube-api-access-rnmzx\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.329314 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-run-ovn\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.329347 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-node-log\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.329361 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-run-ovn\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.329403 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-node-log\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.329367 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-cni-netd\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.329408 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-cni-netd\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.329512 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.329565 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.329649 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-var-lib-openvswitch\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.329715 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-kubelet\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.329751 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-run-openvswitch\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.329753 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-var-lib-openvswitch\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.329800 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/10549a02-b482-4bc0-a770-65dbb57f340a-ovnkube-config\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.329820 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-run-openvswitch\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.329818 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-kubelet\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.329890 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-run-netns\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.329933 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-cni-bin\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.329949 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-run-netns\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.329954 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/10549a02-b482-4bc0-a770-65dbb57f340a-env-overrides\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.330008 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-systemd-units\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.330032 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/10549a02-b482-4bc0-a770-65dbb57f340a-ovn-node-metrics-cert\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.330034 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-cni-bin\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.330096 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-systemd-units\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.339738 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.361365 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.375589 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resou
rces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.392181 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.404164 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-jbz42" event={"ID":"2dc141da-a3db-4ef1-8e59-d0e1d5dee765","Type":"ContainerStarted","Data":"38dfde582e277c788c5c90245d67543f66e056388803b1be1dd8ea405f8b9039"} Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.408292 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.423313 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.437588 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.454599 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.472525 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.497244 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f
3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.512127 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a
39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.526280 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webh
ook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.539815 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.938227 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:56:28 crc kubenswrapper[4792]: E0121 17:56:28.938498 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:56:32.938449886 +0000 UTC m=+26.920413092 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.938679 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:28 crc kubenswrapper[4792]: I0121 17:56:28.938761 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:28 crc kubenswrapper[4792]: E0121 17:56:28.939037 4792 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:56:28 crc kubenswrapper[4792]: E0121 17:56:28.939198 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:32.939175575 +0000 UTC m=+26.921138761 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:56:28 crc kubenswrapper[4792]: E0121 17:56:28.939049 4792 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:56:28 crc kubenswrapper[4792]: E0121 17:56:28.939347 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:32.93933933 +0000 UTC m=+26.921302516 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:56:29 crc kubenswrapper[4792]: I0121 17:56:29.040256 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:56:29 crc kubenswrapper[4792]: I0121 17:56:29.040333 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:56:29 crc kubenswrapper[4792]: E0121 17:56:29.040591 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:56:29 crc kubenswrapper[4792]: E0121 17:56:29.040623 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:56:29 crc kubenswrapper[4792]: E0121 17:56:29.040639 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:29 crc kubenswrapper[4792]: E0121 17:56:29.040726 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:33.04070226 +0000 UTC m=+27.022665446 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:29 crc kubenswrapper[4792]: E0121 17:56:29.040992 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:56:29 crc kubenswrapper[4792]: E0121 17:56:29.041103 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:56:29 crc kubenswrapper[4792]: E0121 17:56:29.041164 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:29 crc kubenswrapper[4792]: E0121 17:56:29.041291 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:33.041270215 +0000 UTC m=+27.023233391 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:29 crc kubenswrapper[4792]: I0121 17:56:29.213355 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 17:39:17.498961985 +0000 UTC Jan 21 17:56:29 crc kubenswrapper[4792]: I0121 17:56:29.246005 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:56:29 crc kubenswrapper[4792]: E0121 17:56:29.246337 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.043795 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-6ckvd"] Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.044334 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-6ckvd" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.047424 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.048041 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.049704 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.049795 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.068877 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.086166 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.107842 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.122050 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.137893 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.154989 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/145eca9e-18bd-4006-9768-30bfcc753c06-host\") pod \"node-ca-6ckvd\" (UID: \"145eca9e-18bd-4006-9768-30bfcc753c06\") " pod="openshift-image-registry/node-ca-6ckvd" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.155216 4792 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rtq5\" (UniqueName: \"kubernetes.io/projected/145eca9e-18bd-4006-9768-30bfcc753c06-kube-api-access-6rtq5\") pod \"node-ca-6ckvd\" (UID: \"145eca9e-18bd-4006-9768-30bfcc753c06\") " pod="openshift-image-registry/node-ca-6ckvd" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.155281 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/145eca9e-18bd-4006-9768-30bfcc753c06-serviceca\") pod \"node-ca-6ckvd\" (UID: \"145eca9e-18bd-4006-9768-30bfcc753c06\") " pod="openshift-image-registry/node-ca-6ckvd" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.157928 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPat
h\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\
"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.172177 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.187817 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.200351 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.211385 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.214220 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 08:43:11.851330163 +0000 UTC Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.223160 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.244362 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:30 crc kubenswrapper[4792]: 
I0121 17:56:30.246611 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.246611 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:56:30 crc kubenswrapper[4792]: E0121 17:56:30.246813 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:56:30 crc kubenswrapper[4792]: E0121 17:56:30.247020 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.256530 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rtq5\" (UniqueName: \"kubernetes.io/projected/145eca9e-18bd-4006-9768-30bfcc753c06-kube-api-access-6rtq5\") pod \"node-ca-6ckvd\" (UID: \"145eca9e-18bd-4006-9768-30bfcc753c06\") " pod="openshift-image-registry/node-ca-6ckvd" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.256576 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/145eca9e-18bd-4006-9768-30bfcc753c06-serviceca\") pod \"node-ca-6ckvd\" (UID: \"145eca9e-18bd-4006-9768-30bfcc753c06\") " pod="openshift-image-registry/node-ca-6ckvd" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.256615 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/145eca9e-18bd-4006-9768-30bfcc753c06-host\") pod \"node-ca-6ckvd\" (UID: \"145eca9e-18bd-4006-9768-30bfcc753c06\") " pod="openshift-image-registry/node-ca-6ckvd" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.256680 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/145eca9e-18bd-4006-9768-30bfcc753c06-host\") pod \"node-ca-6ckvd\" (UID: \"145eca9e-18bd-4006-9768-30bfcc753c06\") " pod="openshift-image-registry/node-ca-6ckvd" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.260691 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.276144 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:30 crc kubenswrapper[4792]: I0121 17:56:30.290492 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired 
or is not yet valid: current time 2026-01-21T17:56:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.215061 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 07:35:21.376067053 +0000 UTC Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.587940 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.590180 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.590229 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.590245 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.590428 4792 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.599976 4792 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.600253 4792 kubelet_node_status.go:79] "Successfully registered node" node="crc" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.601645 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.601723 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.601738 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.601763 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.601777 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:31Z","lastTransitionTime":"2026-01-21T17:56:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:31 crc kubenswrapper[4792]: E0121 17:56:31.625912 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.634559 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.634641 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.634669 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.634723 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.634754 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:31Z","lastTransitionTime":"2026-01-21T17:56:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.637559 4792 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 21 17:56:31 crc kubenswrapper[4792]: E0121 17:56:31.654427 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.659250 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.659396 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.659471 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.659539 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.659608 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:31Z","lastTransitionTime":"2026-01-21T17:56:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:31 crc kubenswrapper[4792]: E0121 17:56:31.671364 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.676998 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.677048 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.677060 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.677078 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.677093 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:31Z","lastTransitionTime":"2026-01-21T17:56:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:31 crc kubenswrapper[4792]: E0121 17:56:31.694026 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.698803 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.699005 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.699071 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.699148 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.699214 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:31Z","lastTransitionTime":"2026-01-21T17:56:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:31 crc kubenswrapper[4792]: E0121 17:56:31.714688 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:31 crc kubenswrapper[4792]: E0121 17:56:31.715016 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.717419 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
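Every retry above dies at the same point: the node.network-node-identity.openshift.io webhook on https://127.0.0.1:9743 serves a certificate whose notAfter (2025-08-24T17:21:41Z) is months behind the node clock (2026-01-21T17:56:31Z), so the kubelet burns through its retry budget and gives up. A quick way to confirm what that endpoint is actually serving is a small TLS probe; the sketch below is illustrative diagnostic tooling run on the node itself, not part of the kubelet or OpenShift:

// certprobe.go - minimal sketch: dial the failing webhook endpoint named in
// the log (127.0.0.1:9743) and print the served certificate's validity
// window, to confirm the x509 "certificate has expired" failure above.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// InsecureSkipVerify lets us inspect the certificate even though
	// verification would fail; this is a diagnostic probe, not a client.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	leaf := conn.ConnectionState().PeerCertificates[0]
	now := time.Now().UTC()
	fmt.Printf("subject:   %s\n", leaf.Subject)
	fmt.Printf("notBefore: %s\n", leaf.NotBefore.UTC().Format(time.RFC3339))
	fmt.Printf("notAfter:  %s\n", leaf.NotAfter.UTC().Format(time.RFC3339))
	if now.After(leaf.NotAfter) {
		fmt.Printf("EXPIRED: current time %s is after %s\n",
			now.Format(time.RFC3339), leaf.NotAfter.UTC().Format(time.RFC3339))
	}
}

The printed window should reproduce the exact dates in the webhook error; on a CRC cluster resumed long after its certificates' validity period, node status updates cannot succeed until the serving certificates are rotated or regenerated.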
event="NodeHasSufficientMemory" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.717462 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.717476 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.717495 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.717506 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:31Z","lastTransitionTime":"2026-01-21T17:56:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.820522 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.820578 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.820590 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.820610 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.820626 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:31Z","lastTransitionTime":"2026-01-21T17:56:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.923372 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.923418 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.923428 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.923444 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:31 crc kubenswrapper[4792]: I0121 17:56:31.923457 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:31Z","lastTransitionTime":"2026-01-21T17:56:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.025954 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.026045 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.026069 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.026099 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.026118 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:32Z","lastTransitionTime":"2026-01-21T17:56:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.084845 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/10549a02-b482-4bc0-a770-65dbb57f340a-env-overrides\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.084961 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/10549a02-b482-4bc0-a770-65dbb57f340a-ovnkube-config\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" Jan 21 17:56:32 crc kubenswrapper[4792]: E0121 17:56:32.085057 4792 kubelet.go:2526] "Housekeeping took longer than expected" err="housekeeping took too long" expected="1s" actual="1.839s" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.085238 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.085236 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.085306 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:32 crc kubenswrapper[4792]: E0121 17:56:32.085359 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.085722 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/145eca9e-18bd-4006-9768-30bfcc753c06-serviceca\") pod \"node-ca-6ckvd\" (UID: \"145eca9e-18bd-4006-9768-30bfcc753c06\") " pod="openshift-image-registry/node-ca-6ckvd" Jan 21 17:56:32 crc kubenswrapper[4792]: E0121 17:56:32.085769 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:56:32 crc kubenswrapper[4792]: E0121 17:56:32.085893 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.086607 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/10549a02-b482-4bc0-a770-65dbb57f340a-ovnkube-script-lib\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.087480 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d8728e15-00c6-4fa7-a79a-cee551b64c18-tuning-conf-dir\") pod \"multus-additional-cni-plugins-6lc6z\" (UID: \"d8728e15-00c6-4fa7-a79a-cee551b64c18\") " pod="openshift-multus/multus-additional-cni-plugins-6lc6z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.089351 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/10549a02-b482-4bc0-a770-65dbb57f340a-ovn-node-metrics-cert\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.089367 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rtq5\" (UniqueName: \"kubernetes.io/projected/145eca9e-18bd-4006-9768-30bfcc753c06-kube-api-access-6rtq5\") pod \"node-ca-6ckvd\" (UID: \"145eca9e-18bd-4006-9768-30bfcc753c06\") " pod="openshift-image-registry/node-ca-6ckvd" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.090494 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnmzx\" (UniqueName: \"kubernetes.io/projected/10549a02-b482-4bc0-a770-65dbb57f340a-kube-api-access-rnmzx\") pod \"ovnkube-node-8t4xq\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.140232 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 
21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.140291 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.140302 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.140323 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.140335 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:32Z","lastTransitionTime":"2026-01-21T17:56:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.215773 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 09:37:16.280570625 +0000 UTC Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.243098 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.243139 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.243147 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.243160 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.243169 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:32Z","lastTransitionTime":"2026-01-21T17:56:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.276016 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.346265 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.346299 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.346310 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.346331 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.346346 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:32Z","lastTransitionTime":"2026-01-21T17:56:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.371147 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.384996 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-6ckvd" Jan 21 17:56:32 crc kubenswrapper[4792]: W0121 17:56:32.397196 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod10549a02_b482_4bc0_a770_65dbb57f340a.slice/crio-493301990d1115a0e0327f815a7eef1bf32ed93b3085913126cf1a7d86cde676 WatchSource:0}: Error finding container 493301990d1115a0e0327f815a7eef1bf32ed93b3085913126cf1a7d86cde676: Status 404 returned error can't find the container with id 493301990d1115a0e0327f815a7eef1bf32ed93b3085913126cf1a7d86cde676 Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.433028 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerStarted","Data":"f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba"} Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.433124 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerStarted","Data":"57ec0791cd0606b0db367f553c9d99a2ca19d3a264361389de22c2e0a38a793a"} Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.439376 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-tvdgr" event={"ID":"129c7cf1-6a9e-440a-8d4e-049c0652cf6e","Type":"ContainerStarted","Data":"89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc"} Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.439441 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-tvdgr" event={"ID":"129c7cf1-6a9e-440a-8d4e-049c0652cf6e","Type":"ContainerStarted","Data":"ddabca106cabcf78f8e73eee0a9f0865913f43787eaf4ffd327c0798b4960b3c"} Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.443167 4792 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f"} Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.445638 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-jbz42" event={"ID":"2dc141da-a3db-4ef1-8e59-d0e1d5dee765","Type":"ContainerStarted","Data":"04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3"} Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.447201 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" event={"ID":"d8728e15-00c6-4fa7-a79a-cee551b64c18","Type":"ContainerStarted","Data":"0373b34f26a8b612441f921cc0774f64231388c84d0cfd697ad1dda8c92f03ac"} Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.448078 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.448195 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.448210 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.448213 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-6ckvd" event={"ID":"145eca9e-18bd-4006-9768-30bfcc753c06","Type":"ContainerStarted","Data":"9a42ccfbe0a65337ce6138b94da3847bac6442a4762f8d3e72564db1a4f0bbf3"} Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.448227 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.448243 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:32Z","lastTransitionTime":"2026-01-21T17:56:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.449998 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerStarted","Data":"493301990d1115a0e0327f815a7eef1bf32ed93b3085913126cf1a7d86cde676"} Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.451194 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.467347 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.490144 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.505353 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resou
rces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.518766 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.533616 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.549606 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.552192 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.552240 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.552251 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.552275 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.552286 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:32Z","lastTransitionTime":"2026-01-21T17:56:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.566524 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.581604 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.597120 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.621921 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.642629 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.655258 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.655317 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.655329 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.655345 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 
17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.655357 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:32Z","lastTransitionTime":"2026-01-21T17:56:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.663505 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.678000 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.693882 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.707996 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.719833 4792 
status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.738746 4792 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\
\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b1
7b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.751713 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.757337 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.757388 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.757406 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.757430 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.757451 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:32Z","lastTransitionTime":"2026-01-21T17:56:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.765968 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.780448 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.793935 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.807618 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.822259 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.837787 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.855553 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.860216 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 
17:56:32.860254 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.860264 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.860281 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.860293 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:32Z","lastTransitionTime":"2026-01-21T17:56:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.873826 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"en
v-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.891518 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.907960 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.927650 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f
3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.963468 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.963506 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.963516 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.963530 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.963538 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:32Z","lastTransitionTime":"2026-01-21T17:56:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.988214 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.988348 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:32 crc kubenswrapper[4792]: I0121 17:56:32.988372 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:32 crc kubenswrapper[4792]: E0121 17:56:32.988450 4792 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:56:32 crc kubenswrapper[4792]: E0121 17:56:32.988493 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:40.988481425 +0000 UTC m=+34.970444611 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:56:32 crc kubenswrapper[4792]: E0121 17:56:32.988795 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:56:40.988785964 +0000 UTC m=+34.970749150 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:56:32 crc kubenswrapper[4792]: E0121 17:56:32.988882 4792 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:56:32 crc kubenswrapper[4792]: E0121 17:56:32.988908 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:40.988902447 +0000 UTC m=+34.970865633 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.065931 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.065967 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.065976 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.065989 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.065998 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:33Z","lastTransitionTime":"2026-01-21T17:56:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.089869 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.089937 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:56:33 crc kubenswrapper[4792]: E0121 17:56:33.090076 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:56:33 crc kubenswrapper[4792]: E0121 17:56:33.090097 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:56:33 crc kubenswrapper[4792]: E0121 17:56:33.090108 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:33 crc kubenswrapper[4792]: E0121 17:56:33.090152 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:41.090139524 +0000 UTC m=+35.072102710 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:33 crc kubenswrapper[4792]: E0121 17:56:33.090274 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:56:33 crc kubenswrapper[4792]: E0121 17:56:33.090298 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:56:33 crc kubenswrapper[4792]: E0121 17:56:33.090312 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:33 crc kubenswrapper[4792]: E0121 17:56:33.090348 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:41.090337229 +0000 UTC m=+35.072300415 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.169649 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.169729 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.169770 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.169815 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.169843 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:33Z","lastTransitionTime":"2026-01-21T17:56:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.216664 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 01:36:35.754722192 +0000 UTC Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.274255 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.274335 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.274353 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.274384 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.274405 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:33Z","lastTransitionTime":"2026-01-21T17:56:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.377472 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.377518 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.377531 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.377551 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.377563 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:33Z","lastTransitionTime":"2026-01-21T17:56:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.456985 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-6ckvd" event={"ID":"145eca9e-18bd-4006-9768-30bfcc753c06","Type":"ContainerStarted","Data":"24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2"} Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.465069 4792 generic.go:334] "Generic (PLEG): container finished" podID="10549a02-b482-4bc0-a770-65dbb57f340a" containerID="421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a" exitCode=0 Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.465182 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerDied","Data":"421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a"} Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.468947 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerStarted","Data":"29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311"} Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.472903 4792 generic.go:334] "Generic (PLEG): container finished" podID="d8728e15-00c6-4fa7-a79a-cee551b64c18" containerID="e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e" exitCode=0 Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.473902 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" event={"ID":"d8728e15-00c6-4fa7-a79a-cee551b64c18","Type":"ContainerDied","Data":"e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e"} Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.478939 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.481789 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.481825 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.481870 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.481892 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.481906 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:33Z","lastTransitionTime":"2026-01-21T17:56:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.501292 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.519758 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.537964 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.556102 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.570518 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.585801 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.585868 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.585882 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.585905 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.585953 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:33Z","lastTransitionTime":"2026-01-21T17:56:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.588930 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.604239 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.625200 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.640934 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.663677 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f
3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.678434 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a
39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.689314 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.689363 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.689379 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.689404 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.689422 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:33Z","lastTransitionTime":"2026-01-21T17:56:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.693093 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: 
I0121 17:56:33.716866 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":
false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\
\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift
-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.732442 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.748229 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.764815 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.781361 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.791656 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.791688 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.791698 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.791714 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.791726 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:33Z","lastTransitionTime":"2026-01-21T17:56:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.797436 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.813423 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sh
a256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 
17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.830381 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.851596 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.869283 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.895222 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.895279 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.895293 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.895336 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.895354 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:33Z","lastTransitionTime":"2026-01-21T17:56:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.901048 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f87
7031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.914543 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.932224 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.958249 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.974895 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:33 crc kubenswrapper[4792]: I0121 17:56:33.990663 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.000437 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.000477 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.000489 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.000505 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.000518 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:34Z","lastTransitionTime":"2026-01-21T17:56:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.010809 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35
825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.102591 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.102628 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.102639 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.102655 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.102667 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:34Z","lastTransitionTime":"2026-01-21T17:56:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.205232 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.205281 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.205292 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.205312 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.205324 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:34Z","lastTransitionTime":"2026-01-21T17:56:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.216971 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 19:33:58.723106916 +0000 UTC Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.246423 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.246514 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.246434 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:34 crc kubenswrapper[4792]: E0121 17:56:34.246637 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:56:34 crc kubenswrapper[4792]: E0121 17:56:34.246746 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:56:34 crc kubenswrapper[4792]: E0121 17:56:34.246869 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.308164 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.308649 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.308663 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.308683 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.308692 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:34Z","lastTransitionTime":"2026-01-21T17:56:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.412597 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.412661 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.412681 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.412705 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.412719 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:34Z","lastTransitionTime":"2026-01-21T17:56:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.482651 4792 generic.go:334] "Generic (PLEG): container finished" podID="d8728e15-00c6-4fa7-a79a-cee551b64c18" containerID="6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622" exitCode=0 Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.482730 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" event={"ID":"d8728e15-00c6-4fa7-a79a-cee551b64c18","Type":"ContainerDied","Data":"6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622"} Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.487441 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerStarted","Data":"dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b"} Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.487533 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerStarted","Data":"7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd"} Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.487548 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerStarted","Data":"c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb"} Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.487560 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerStarted","Data":"67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522"} Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.487578 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerStarted","Data":"5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243"} Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.501486 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.516741 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.516784 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.516794 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.516818 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.516829 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:34Z","lastTransitionTime":"2026-01-21T17:56:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.517490 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.534762 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.549814 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.564021 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.578252 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.594820 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/r
un/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.618970 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\
\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7
f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.620304 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.620352 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.620369 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.620397 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.620419 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:34Z","lastTransitionTime":"2026-01-21T17:56:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.635376 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.650363 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.670899 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.682324 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\
\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.693906 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.708166 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.721972 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.723528 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.723558 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.723571 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.723592 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.723605 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:34Z","lastTransitionTime":"2026-01-21T17:56:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.825611 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.825650 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.825664 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.825682 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.825694 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:34Z","lastTransitionTime":"2026-01-21T17:56:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.929030 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.929103 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.929116 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.929142 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:34 crc kubenswrapper[4792]: I0121 17:56:34.929156 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:34Z","lastTransitionTime":"2026-01-21T17:56:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.031629 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.031673 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.031683 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.031700 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.031712 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:35Z","lastTransitionTime":"2026-01-21T17:56:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.134967 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.135011 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.135023 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.135045 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.135059 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:35Z","lastTransitionTime":"2026-01-21T17:56:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.217417 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 07:31:13.403149738 +0000 UTC Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.238067 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.238128 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.238144 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.238170 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.238188 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:35Z","lastTransitionTime":"2026-01-21T17:56:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.340659 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.340704 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.340715 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.340729 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.340740 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:35Z","lastTransitionTime":"2026-01-21T17:56:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.444428 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.444457 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.444464 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.444479 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.444489 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:35Z","lastTransitionTime":"2026-01-21T17:56:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.496216 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerStarted","Data":"26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de"} Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.499419 4792 generic.go:334] "Generic (PLEG): container finished" podID="d8728e15-00c6-4fa7-a79a-cee551b64c18" containerID="f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6" exitCode=0 Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.499484 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" event={"ID":"d8728e15-00c6-4fa7-a79a-cee551b64c18","Type":"ContainerDied","Data":"f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6"} Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.522319 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:35Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.538319 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:35Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.546899 4792 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.546946 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.546957 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.546978 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.546989 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:35Z","lastTransitionTime":"2026-01-21T17:56:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.559250 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:35Z 
is after 2025-08-24T17:21:41Z" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.574386 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:35Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.588127 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:35Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.603980 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:35Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.620257 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:35Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.635340 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:35Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.648475 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:35Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.651072 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.651100 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.651111 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.651131 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.651142 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:35Z","lastTransitionTime":"2026-01-21T17:56:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.663434 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:35Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.683683 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sh
a256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:35Z is after 2025-08-24T17:21:41Z" Jan 21 
17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.699407 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:35Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.712728 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:35Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.728755 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",
\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:35Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.752404 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\"
:\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:35Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.754260 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.754309 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.754321 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.754343 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 
17:56:35.754356 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:35Z","lastTransitionTime":"2026-01-21T17:56:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.861277 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.861336 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.861348 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.861364 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.861373 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:35Z","lastTransitionTime":"2026-01-21T17:56:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.965085 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.965149 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.965162 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.965183 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:35 crc kubenswrapper[4792]: I0121 17:56:35.965196 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:35Z","lastTransitionTime":"2026-01-21T17:56:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.068579 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.068634 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.068646 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.068668 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.068681 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:36Z","lastTransitionTime":"2026-01-21T17:56:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.171380 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.171443 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.171458 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.171482 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.171497 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:36Z","lastTransitionTime":"2026-01-21T17:56:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.218372 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 04:24:01.528233557 +0000 UTC
Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.246101 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.246257 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:56:36 crc kubenswrapper[4792]: E0121 17:56:36.246353 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.246375 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:56:36 crc kubenswrapper[4792]: E0121 17:56:36.246495 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:56:36 crc kubenswrapper[4792]: E0121 17:56:36.246576 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.269103 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.274099 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.274142 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.274155 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 
17:56:36.274180 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.274197 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:36Z","lastTransitionTime":"2026-01-21T17:56:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.286106 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc2
76e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.301962 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.316703 4792 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.337458 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers 
with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":tru
e,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8cc
f4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.352917 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.366200 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.377618 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.377672 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.377685 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.377705 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.377720 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:36Z","lastTransitionTime":"2026-01-21T17:56:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.386861 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z 
is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.405016 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.420695 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.431958 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.446117 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.459530 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.471361 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.481757 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.481826 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.481877 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.481907 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.481922 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:36Z","lastTransitionTime":"2026-01-21T17:56:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.489054 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.506101 4792 generic.go:334] "Generic (PLEG): container finished" podID="d8728e15-00c6-4fa7-a79a-cee551b64c18" containerID="cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797" exitCode=0 Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.506162 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" event={"ID":"d8728e15-00c6-4fa7-a79a-cee551b64c18","Type":"ContainerDied","Data":"cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797"} Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.534839 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a
9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.562770 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z 
is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.578758 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.584673 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.584709 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.584719 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.584737 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.584748 4792 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:36Z","lastTransitionTime":"2026-01-21T17:56:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.598806 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.617024 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.630493 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.646174 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.660594 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.694143 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.696141 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.696179 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.696190 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.696209 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.696221 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:36Z","lastTransitionTime":"2026-01-21T17:56:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.721204 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.755614 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.770078 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.786628 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.799271 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.799305 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.799315 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.799336 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.799349 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:36Z","lastTransitionTime":"2026-01-21T17:56:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.806017 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.820613 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.902302 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.902376 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.902392 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.902415 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 
17:56:36 crc kubenswrapper[4792]: I0121 17:56:36.902429 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:36Z","lastTransitionTime":"2026-01-21T17:56:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.006392 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.006456 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.006469 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.006492 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.006504 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:37Z","lastTransitionTime":"2026-01-21T17:56:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.110825 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.110884 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.110896 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.110917 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.110931 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:37Z","lastTransitionTime":"2026-01-21T17:56:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
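Every status patch in this stretch of the log fails for the same reason: the API server forwards each PATCH through the pod.network-node-identity.openshift.io admission webhook at https://127.0.0.1:9743, whose serving certificate expired on 2025-08-24T17:21:41Z while the node clock reads 2026-01-21. A minimal Go sketch of the validity-window test behind the recurring "x509: certificate has expired or is not yet valid" message follows; the tls.crt filename is an assumption (the webhook container mounts its certificate at /etc/webhook-cert/ per the volumeMounts recorded earlier), not something this log states.

    // certcheck.go -- illustrative sketch only; reproduces the NotBefore/NotAfter
    // comparison that x509 verification applies to the webhook serving certificate.
    package main

    import (
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "log"
        "os"
        "time"
    )

    func main() {
        // Assumed path: the webhook container mounts "webhook-cert" at /etc/webhook-cert/.
        data, err := os.ReadFile("/etc/webhook-cert/tls.crt")
        if err != nil {
            log.Fatal(err)
        }
        block, _ := pem.Decode(data)
        if block == nil {
            log.Fatal("no PEM block found")
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            log.Fatal(err)
        }
        now := time.Now().UTC()
        switch {
        case now.Before(cert.NotBefore):
            fmt.Printf("not yet valid: current time %s is before %s\n",
                now.Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
        case now.After(cert.NotAfter):
            // Matches the failures above: current time 2026-01-21T17:56:36Z is after 2025-08-24T17:21:41Z.
            fmt.Printf("expired: current time %s is after %s\n",
                now.Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
        default:
            fmt.Println("certificate is within its validity window")
        }
    }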
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.214345 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.214394 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.214407 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.214433 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.214448 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:37Z","lastTransitionTime":"2026-01-21T17:56:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.219097 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 13:47:29.850139726 +0000 UTC
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.317596 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.317642 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.317653 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.317674 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.317689 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:37Z","lastTransitionTime":"2026-01-21T17:56:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
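The certificate_manager.go record above shows the kubelet-serving certificate still inside its validity window (expires 2026-02-24) but already past its rotation deadline (2025-12-15), so the manager should attempt rotation immediately. client-go's certificate manager derives that deadline as a jittered point roughly 70-90% of the way through the validity period; the sketch below illustrates that computation under an assumed one-year validity window (the log does not state NotBefore):

    // rotation.go -- hedged sketch of a jittered rotation deadline; the 0.7-0.9
    // band mirrors client-go's documented behavior, but the constants here are
    // illustrative, not copied from the kubelet source.
    package main

    import (
        "fmt"
        "math/rand"
        "time"
    )

    func rotationDeadline(notBefore, notAfter time.Time) time.Time {
        total := notAfter.Sub(notBefore)
        // Uniformly random fraction in [0.7, 0.9) of the validity period.
        jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
        return notBefore.Add(jittered)
    }

    func main() {
        notAfter, err := time.Parse("2006-01-02 15:04:05 -0700 MST", "2026-02-24 05:53:03 +0000 UTC")
        if err != nil {
            panic(err)
        }
        notBefore := notAfter.Add(-365 * 24 * time.Hour) // assumption: one-year validity
        deadline := rotationDeadline(notBefore, notAfter)
        fmt.Println("rotation deadline:", deadline.UTC())
        // Against this node's 2026-01-21 clock the deadline is in the past,
        // which is why rotation is due now.
        fmt.Println("rotate now:", time.Now().After(deadline))
    }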
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.420547 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.420602 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.420615 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.420636 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.420650 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:37Z","lastTransitionTime":"2026-01-21T17:56:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.512132 4792 generic.go:334] "Generic (PLEG): container finished" podID="d8728e15-00c6-4fa7-a79a-cee551b64c18" containerID="75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20" exitCode=0
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.512189 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" event={"ID":"d8728e15-00c6-4fa7-a79a-cee551b64c18","Type":"ContainerDied","Data":"75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20"}
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.529031 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.529113 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.529133 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.529157 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.529196 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:37Z","lastTransitionTime":"2026-01-21T17:56:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.533318 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.534837 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerStarted","Data":"34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26"} Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.554137 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.571119 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.586595 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.603232 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.622332 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.640195 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.640870 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.640928 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.640951 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.640964 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:37Z","lastTransitionTime":"2026-01-21T17:56:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.642120 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.655909 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.673514 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.695622 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.704575 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\
" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.718824 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.735301 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-ma
nager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.744112 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.744147 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.744159 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.744174 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.744185 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:37Z","lastTransitionTime":"2026-01-21T17:56:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.751017 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.762966 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.847093 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.847236 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.847254 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.847323 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.847345 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:37Z","lastTransitionTime":"2026-01-21T17:56:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.952627 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.952675 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.952684 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.952707 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:37 crc kubenswrapper[4792]: I0121 17:56:37.952722 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:37Z","lastTransitionTime":"2026-01-21T17:56:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.055510 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.055578 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.055594 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.055618 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.055634 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:38Z","lastTransitionTime":"2026-01-21T17:56:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.158783 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.158842 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.158879 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.158901 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.158917 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:38Z","lastTransitionTime":"2026-01-21T17:56:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.219936 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 19:00:30.133284186 +0000 UTC
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.246275 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:56:38 crc kubenswrapper[4792]: E0121 17:56:38.246784 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.246907 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.247074 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:56:38 crc kubenswrapper[4792]: E0121 17:56:38.247230 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 21 17:56:38 crc kubenswrapper[4792]: E0121 17:56:38.247580 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.264391 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.264436 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.264450 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.264471 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.264485 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:38Z","lastTransitionTime":"2026-01-21T17:56:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.368617 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.368666 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.368679 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.368698 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.368713 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:38Z","lastTransitionTime":"2026-01-21T17:56:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.471304 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.471350 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.471360 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.471377 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.471389 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:38Z","lastTransitionTime":"2026-01-21T17:56:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.520816 4792 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.542586 4792 generic.go:334] "Generic (PLEG): container finished" podID="d8728e15-00c6-4fa7-a79a-cee551b64c18" containerID="f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7" exitCode=0
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.542629 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" event={"ID":"d8728e15-00c6-4fa7-a79a-cee551b64c18","Type":"ContainerDied","Data":"f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7"}
Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.561891 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.578154 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.578197 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.578208 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.578225 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.578236 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:38Z","lastTransitionTime":"2026-01-21T17:56:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.581804 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.598080 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.613264 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.630659 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.649916 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.669602 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.682384 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.683156 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.683202 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.683229 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.683588 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:38Z","lastTransitionTime":"2026-01-21T17:56:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.685463 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.703754 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\
"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\"
:\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.725958 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f
3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.742679 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a
39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.759424 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webh
ook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.780057 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:38Z 
is after 2025-08-24T17:21:41Z" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.788563 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.788617 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.788628 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.788648 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.788661 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:38Z","lastTransitionTime":"2026-01-21T17:56:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.797428 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.812234 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\
\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.891087 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.891127 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.891137 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.891153 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.891165 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:38Z","lastTransitionTime":"2026-01-21T17:56:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.995328 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.995380 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.995392 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.995411 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:38 crc kubenswrapper[4792]: I0121 17:56:38.995424 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:38Z","lastTransitionTime":"2026-01-21T17:56:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.097897 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.097943 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.097952 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.097969 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.097979 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:39Z","lastTransitionTime":"2026-01-21T17:56:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.200355 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.200392 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.200400 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.200416 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.200425 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:39Z","lastTransitionTime":"2026-01-21T17:56:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.221054 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 14:25:57.791800489 +0000 UTC Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.303577 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.303633 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.303651 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.303677 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.303692 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:39Z","lastTransitionTime":"2026-01-21T17:56:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.406940 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.406994 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.407004 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.407021 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.407034 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:39Z","lastTransitionTime":"2026-01-21T17:56:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.510195 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.510285 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.510306 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.510334 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.510351 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:39Z","lastTransitionTime":"2026-01-21T17:56:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.553568 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerStarted","Data":"ecb7f52f90906ca0d0e42d422f99503b4aa7fccd81f50fd1fca6d382885cf444"} Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.554099 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.554154 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.558700 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" event={"ID":"d8728e15-00c6-4fa7-a79a-cee551b64c18","Type":"ContainerStarted","Data":"429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0"} Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.571061 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.587030 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.611438 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\
\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecb7f52f90906ca0d0e42d422f99503b4aa7fccd81f50fd1fca6d382885cf444\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuberne
tes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.613465 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.613513 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.613524 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.613543 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.613555 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:39Z","lastTransitionTime":"2026-01-21T17:56:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.628035 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-cont
roller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.645001 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.658742 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.677629 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.682549 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.686097 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.696225 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired 
or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.712528 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.716459 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.716501 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.716510 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.716528 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.716542 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:39Z","lastTransitionTime":"2026-01-21T17:56:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.730128 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.750932 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269
019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"
,\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.768212 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.787828 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.803653 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.819764 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.819881 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.819897 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.819919 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.819932 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:39Z","lastTransitionTime":"2026-01-21T17:56:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.825410 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac9
2cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\
\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.838604 4792 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.850790 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.866375 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.884082 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.899059 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.914724 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.922414 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.922467 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.922482 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.922499 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.922513 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:39Z","lastTransitionTime":"2026-01-21T17:56:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.931401 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.945978 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.960089 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.975103 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:39 crc kubenswrapper[4792]: I0121 17:56:39.995376 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kube
rnetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\
\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.008124 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:40Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.020841 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:40Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.024474 4792 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.024513 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.024524 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.024540 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.024552 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:40Z","lastTransitionTime":"2026-01-21T17:56:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.041347 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecb7f52f90906ca0d0e42d422f99503b4aa7fccd
81f50fd1fca6d382885cf444\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:40Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.052446 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:40Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.126651 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.126945 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.127029 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.127129 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.127191 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:40Z","lastTransitionTime":"2026-01-21T17:56:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.221996 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 13:41:29.653307115 +0000 UTC Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.230024 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.230119 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.230144 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.230175 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.230213 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:40Z","lastTransitionTime":"2026-01-21T17:56:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.246236 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.246273 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:40 crc kubenswrapper[4792]: E0121 17:56:40.246503 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:56:40 crc kubenswrapper[4792]: E0121 17:56:40.246497 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.246300 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:56:40 crc kubenswrapper[4792]: E0121 17:56:40.246585 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
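The KubeletNotReady condition repeated above comes down to a single check: the kubelet found no CNI configuration file under /etc/kubernetes/cni/net.d/. A minimal Go sketch of that kind of readiness probe, assuming the directory path taken from the log and a plausible extension list (.conf, .conflist, .json) as an approximation rather than the exact kubelet/CRI-O implementation:

// cnicheck.go - a minimal sketch of a CNI-readiness probe, modeled on the
// "no CNI configuration file in /etc/kubernetes/cni/net.d/" condition above.
// The accepted-extension list is an assumption, not the exact kubelet logic.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether dir contains at least one CNI network config.
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	dir := "/etc/kubernetes/cni/net.d/" // path taken from the log above
	ok, err := hasCNIConfig(dir)
	if err != nil || !ok {
		fmt.Printf("network not ready: no CNI configuration file in %s (err=%v)\n", dir, err)
		os.Exit(1)
	}
	fmt.Println("CNI configuration present; network plugin can initialize")
}

Until the ovnkube-node pod writes its config into that directory, every node-status sync re-records the NodeNotReady events seen throughout this stretch of the log.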
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.337271 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.337455 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.337552 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.337588 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.337597 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:40Z","lastTransitionTime":"2026-01-21T17:56:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.441225 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.441276 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.441286 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.441305 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.441316 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:40Z","lastTransitionTime":"2026-01-21T17:56:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.544775 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.545031 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.545090 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.545208 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.545303 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:40Z","lastTransitionTime":"2026-01-21T17:56:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.562640 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.649947 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.650048 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.650073 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.650279 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.650380 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:40Z","lastTransitionTime":"2026-01-21T17:56:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.754420 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.754497 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.754514 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.754538 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.754554 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:40Z","lastTransitionTime":"2026-01-21T17:56:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.858628 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.858729 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.858760 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.858795 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.858818 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:40Z","lastTransitionTime":"2026-01-21T17:56:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.967244 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.967396 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.967422 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.967460 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:40 crc kubenswrapper[4792]: I0121 17:56:40.967493 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:40Z","lastTransitionTime":"2026-01-21T17:56:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.071454 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.071528 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.071547 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.071575 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.071594 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:41Z","lastTransitionTime":"2026-01-21T17:56:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.088223 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:56:41 crc kubenswrapper[4792]: E0121 17:56:41.088561 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:56:57.088511395 +0000 UTC m=+51.070474621 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.089020 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.089120 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:41 crc kubenswrapper[4792]: E0121 17:56:41.089395 4792 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:56:41 crc kubenswrapper[4792]: E0121 17:56:41.089463 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:57.089447389 +0000 UTC m=+51.071410615 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:56:41 crc kubenswrapper[4792]: E0121 17:56:41.090192 4792 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:56:41 crc kubenswrapper[4792]: E0121 17:56:41.090257 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:57.090239619 +0000 UTC m=+51.072202855 (durationBeforeRetry 16s). 
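Both volume failures above end with "No retries permitted until ... (durationBeforeRetry 16s)": the kubelet spaces out failed mount/unmount attempts with an exponentially growing delay. A sketch of that doubling pattern, with illustrative constants (the kubelet's actual initial delay and cap may differ):

// backoff.go - sketch of the doubling retry delay suggested by the
// "durationBeforeRetry 16s" entries above. Constants are illustrative.
package main

import (
	"fmt"
	"time"
)

type backoff struct {
	initial time.Duration
	max     time.Duration
	current time.Duration
}

// next returns the delay before the upcoming retry and doubles the stored
// delay for the one after, capped at max.
func (b *backoff) next() time.Duration {
	if b.current == 0 {
		b.current = b.initial
	}
	d := b.current
	if b.current < b.max {
		b.current *= 2
		if b.current > b.max {
			b.current = b.max
		}
	}
	return d
}

func main() {
	b := backoff{initial: 500 * time.Millisecond, max: 2 * time.Minute}
	for i := 1; i <= 7; i++ {
		fmt.Printf("retry %d after %v\n", i, b.next())
	}
	// Prints 500ms, 1s, 2s, 4s, 8s, 16s, 32s - with a 500ms initial delay,
	// the sixth consecutive failure waits 16s, matching the log above.
}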
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.175110 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.175169 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.175182 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.175203 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.175215 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:41Z","lastTransitionTime":"2026-01-21T17:56:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.191229 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.191772 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:56:41 crc kubenswrapper[4792]: E0121 17:56:41.191484 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:56:41 crc kubenswrapper[4792]: E0121 17:56:41.192274 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:56:41 crc kubenswrapper[4792]: E0121 17:56:41.192511 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:41 crc kubenswrapper[4792]: E0121 17:56:41.191936 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:56:41 crc kubenswrapper[4792]: E0121 
17:56:41.192831 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:56:41 crc kubenswrapper[4792]: E0121 17:56:41.192871 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:41 crc kubenswrapper[4792]: E0121 17:56:41.192807 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:57.192768654 +0000 UTC m=+51.174731880 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:41 crc kubenswrapper[4792]: E0121 17:56:41.193357 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-21 17:56:57.193330868 +0000 UTC m=+51.175294094 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.222158 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 01:04:27.569660879 +0000 UTC Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.278482 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.278557 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.278570 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.278596 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.278612 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:41Z","lastTransitionTime":"2026-01-21T17:56:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.382178 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.382232 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.382244 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.382266 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.382279 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:41Z","lastTransitionTime":"2026-01-21T17:56:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.485155 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.485189 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.485200 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.485216 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.485229 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:41Z","lastTransitionTime":"2026-01-21T17:56:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.565207 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.587834 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.587902 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.587916 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.587937 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.587949 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:41Z","lastTransitionTime":"2026-01-21T17:56:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.692038 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.692116 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.692137 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.692168 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.692189 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:41Z","lastTransitionTime":"2026-01-21T17:56:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.794878 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.794933 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.794949 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.794969 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.794983 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:41Z","lastTransitionTime":"2026-01-21T17:56:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.883565 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps"] Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.884393 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.887652 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.887732 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.898053 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.898116 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.898137 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.898167 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.898188 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:41Z","lastTransitionTime":"2026-01-21T17:56:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.904603 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:41Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.918102 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:41Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.935032 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4a2d61b-a0b0-4073-bd76-665e9fa19250\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vrcps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:41Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.955898 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:41Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.974907 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:41Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.991932 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:41Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.999705 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a4a2d61b-a0b0-4073-bd76-665e9fa19250-ovnkube-config\") pod 
\"ovnkube-control-plane-749d76644c-vrcps\" (UID: \"a4a2d61b-a0b0-4073-bd76-665e9fa19250\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.999842 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a4a2d61b-a0b0-4073-bd76-665e9fa19250-env-overrides\") pod \"ovnkube-control-plane-749d76644c-vrcps\" (UID: \"a4a2d61b-a0b0-4073-bd76-665e9fa19250\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" Jan 21 17:56:41 crc kubenswrapper[4792]: I0121 17:56:41.999915 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a4a2d61b-a0b0-4073-bd76-665e9fa19250-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-vrcps\" (UID: \"a4a2d61b-a0b0-4073-bd76-665e9fa19250\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.000105 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnvmp\" (UniqueName: \"kubernetes.io/projected/a4a2d61b-a0b0-4073-bd76-665e9fa19250-kube-api-access-fnvmp\") pod \"ovnkube-control-plane-749d76644c-vrcps\" (UID: \"a4a2d61b-a0b0-4073-bd76-665e9fa19250\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.001394 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.001440 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.001455 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.001478 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.001490 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:42Z","lastTransitionTime":"2026-01-21T17:56:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.006140 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.018600 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.033152 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.048278 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.061524 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.075675 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.075713 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.075727 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.075746 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.075759 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:42Z","lastTransitionTime":"2026-01-21T17:56:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.081404 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:42 crc kubenswrapper[4792]: E0121 17:56:42.092704 4792 kubelet_node_status.go:585] "Error updating node status, will retry" 
err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329b
a568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\
\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.097574 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.097625 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.097639 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.097658 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.097672 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:42Z","lastTransitionTime":"2026-01-21T17:56:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.101576 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a4a2d61b-a0b0-4073-bd76-665e9fa19250-env-overrides\") pod \"ovnkube-control-plane-749d76644c-vrcps\" (UID: \"a4a2d61b-a0b0-4073-bd76-665e9fa19250\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.101661 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a4a2d61b-a0b0-4073-bd76-665e9fa19250-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-vrcps\" (UID: \"a4a2d61b-a0b0-4073-bd76-665e9fa19250\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.101731 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnvmp\" (UniqueName: \"kubernetes.io/projected/a4a2d61b-a0b0-4073-bd76-665e9fa19250-kube-api-access-fnvmp\") pod \"ovnkube-control-plane-749d76644c-vrcps\" (UID: \"a4a2d61b-a0b0-4073-bd76-665e9fa19250\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.101798 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a4a2d61b-a0b0-4073-bd76-665e9fa19250-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-vrcps\" (UID: \"a4a2d61b-a0b0-4073-bd76-665e9fa19250\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.102702 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a4a2d61b-a0b0-4073-bd76-665e9fa19250-env-overrides\") pod \"ovnkube-control-plane-749d76644c-vrcps\" (UID: \"a4a2d61b-a0b0-4073-bd76-665e9fa19250\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.102775 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a4a2d61b-a0b0-4073-bd76-665e9fa19250-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-vrcps\" (UID: \"a4a2d61b-a0b0-4073-bd76-665e9fa19250\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.107037 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f
3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.112703 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a4a2d61b-a0b0-4073-bd76-665e9fa19250-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-vrcps\" (UID: \"a4a2d61b-a0b0-4073-bd76-665e9fa19250\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" Jan 21 17:56:42 crc kubenswrapper[4792]: E0121 17:56:42.117833 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056
b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951
},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-21T17:56:42Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.124433 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.125093 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.125122 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.125145 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.125159 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:42Z","lastTransitionTime":"2026-01-21T17:56:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.126451 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnvmp\" (UniqueName: \"kubernetes.io/projected/a4a2d61b-a0b0-4073-bd76-665e9fa19250-kube-api-access-fnvmp\") pod \"ovnkube-control-plane-749d76644c-vrcps\" (UID: \"a4a2d61b-a0b0-4073-bd76-665e9fa19250\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps"
Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.127027 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:42Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:42 crc kubenswrapper[4792]: E0121 17:56:42.138193 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d
27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:42Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.140562 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:42Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.143489 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.143630 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.143670 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.143697 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.143713 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:42Z","lastTransitionTime":"2026-01-21T17:56:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:42 crc kubenswrapper[4792]: E0121 17:56:42.157803 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:42Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.162020 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.162073 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.162086 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.162108 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.162121 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:42Z","lastTransitionTime":"2026-01-21T17:56:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.167015 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecb7f52f90906ca0d0e42d422f99503b4aa7fccd
81f50fd1fca6d382885cf444\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:42 crc kubenswrapper[4792]: E0121 17:56:42.176623 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed2
1\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:42 crc kubenswrapper[4792]: E0121 17:56:42.176764 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 21 17:56:42 crc kubenswrapper[4792]: 
I0121 17:56:42.178872 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.178907 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.178920 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.178963 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.178978 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:42Z","lastTransitionTime":"2026-01-21T17:56:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.209355 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.223348 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 22:08:14.412776217 +0000 UTC Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.248395 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:56:42 crc kubenswrapper[4792]: E0121 17:56:42.248580 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.249365 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:42 crc kubenswrapper[4792]: E0121 17:56:42.249452 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.250991 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:56:42 crc kubenswrapper[4792]: E0121 17:56:42.251075 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
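
The repeated "no CNI configuration file in /etc/kubernetes/cni/net.d/" failures above mean the network plugin has not yet written any configuration into the directory the container runtime watches. A minimal sketch in Go of that kind of readiness probe, assuming the check simply looks for files with conventional CNI extensions; the real runtime goes through libcni, and hasCNIConfig and its extension list here are illustrative only:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether dir contains at least one file with a
// conventional CNI config extension. Illustrative only: the actual
// runtime check is performed via libcni, not this function.
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	if err != nil || !ok {
		fmt.Println("container runtime network not ready: no CNI configuration file found")
		return
	}
	fmt.Println("NetworkReady=true")
}

Until such a file appears, every pod sync in this excerpt is skipped with the same NetworkPluginNotReady error.
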
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.282649 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.282692 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.282701 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.282720 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.282736 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:42Z","lastTransitionTime":"2026-01-21T17:56:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.385087 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.385137 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.385151 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.385172 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.385186 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:42Z","lastTransitionTime":"2026-01-21T17:56:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.487702 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.487750 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.487763 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.487785 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.487801 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:42Z","lastTransitionTime":"2026-01-21T17:56:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.572243 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" event={"ID":"a4a2d61b-a0b0-4073-bd76-665e9fa19250","Type":"ContainerStarted","Data":"2de5d519782e78eafe9c65b4034fda8313e2174421eb57e1d63b08e04aec29ff"} Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.591112 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.591192 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.591215 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.591246 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.591267 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:42Z","lastTransitionTime":"2026-01-21T17:56:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.694755 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.694842 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.694913 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.694962 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.694984 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:42Z","lastTransitionTime":"2026-01-21T17:56:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.798709 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.798774 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.798789 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.798816 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.798834 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:42Z","lastTransitionTime":"2026-01-21T17:56:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.902958 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.903054 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.903075 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.903115 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:42 crc kubenswrapper[4792]: I0121 17:56:42.903143 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:42Z","lastTransitionTime":"2026-01-21T17:56:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.008483 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.008559 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.008579 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.008614 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.008635 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:43Z","lastTransitionTime":"2026-01-21T17:56:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.033497 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-ddsqn"] Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.034325 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:56:43 crc kubenswrapper[4792]: E0121 17:56:43.034457 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
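
The "failed to patch status" entries throughout this excerpt carry strategic-merge-patch bodies: "$setElementOrder/conditions" pins the order of the merged conditions list while "conditions" carries only the entries that changed. Kubelet computes these with k8s.io/apimachinery's strategicpatch helpers; the hand-built sketch below only reproduces the shape for illustration, with values abbreviated:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Shape of the node-status patch seen in the log, abbreviated.
	patch := map[string]any{
		"status": map[string]any{
			"$setElementOrder/conditions": []map[string]string{
				{"type": "MemoryPressure"},
				{"type": "DiskPressure"},
				{"type": "PIDPressure"},
				{"type": "Ready"},
			},
			"conditions": []map[string]any{{
				"type":    "Ready",
				"status":  "False",
				"reason":  "KubeletNotReady",
				"message": "container runtime network not ready",
			}},
		},
	}
	body, err := json.Marshal(patch)
	if err != nil {
		panic(err)
	}
	// Sent as an HTTP PATCH with
	// Content-Type: application/strategic-merge-patch+json.
	fmt.Println(string(body))
}

Here every such PATCH is rejected before it reaches the API object, because the network-node-identity admission webhook that must approve it cannot be reached over TLS.
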
pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.061059 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-re
sources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.082327 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.098527 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.111968 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.112013 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.112028 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.112048 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.112062 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:43Z","lastTransitionTime":"2026-01-21T17:56:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.112735 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs\") pod \"network-metrics-daemon-ddsqn\" (UID: \"97b1a1f0-3533-44d9-8c10-9feb31d988ea\") " pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.112800 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgmns\" (UniqueName: \"kubernetes.io/projected/97b1a1f0-3533-44d9-8c10-9feb31d988ea-kube-api-access-bgmns\") pod \"network-metrics-daemon-ddsqn\" (UID: \"97b1a1f0-3533-44d9-8c10-9feb31d988ea\") " pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.114456 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4a2d61b-a0b0-4073-bd76-665e9fa19250\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vrcps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.129032 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.146168 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.161745 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.185901 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.208528 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97b1a1f0-3533-44d9-8c10-9feb31d988ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ddsqn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc 
kubenswrapper[4792]: I0121 17:56:43.213760 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs\") pod \"network-metrics-daemon-ddsqn\" (UID: \"97b1a1f0-3533-44d9-8c10-9feb31d988ea\") " pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.213818 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgmns\" (UniqueName: \"kubernetes.io/projected/97b1a1f0-3533-44d9-8c10-9feb31d988ea-kube-api-access-bgmns\") pod \"network-metrics-daemon-ddsqn\" (UID: \"97b1a1f0-3533-44d9-8c10-9feb31d988ea\") " pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:56:43 crc kubenswrapper[4792]: E0121 17:56:43.214898 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:56:43 crc kubenswrapper[4792]: E0121 17:56:43.215019 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs podName:97b1a1f0-3533-44d9-8c10-9feb31d988ea nodeName:}" failed. No retries permitted until 2026-01-21 17:56:43.714978801 +0000 UTC m=+37.696941987 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs") pod "network-metrics-daemon-ddsqn" (UID: "97b1a1f0-3533-44d9-8c10-9feb31d988ea") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.215463 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.215497 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.215509 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.215527 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.215540 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:43Z","lastTransitionTime":"2026-01-21T17:56:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.224205 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 11:51:28.557225672 +0000 UTC Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.240511 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30
a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure
-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.246892 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgmns\" (UniqueName: \"kubernetes.io/projected/97b1a1f0-3533-44d9-8c10-9feb31d988ea-kube-api-access-bgmns\") pod \"network-metrics-daemon-ddsqn\" (UID: \"97b1a1f0-3533-44d9-8c10-9feb31d988ea\") " pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.263817 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.280287 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.294639 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.312096 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.318148 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.318197 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:43 crc 
kubenswrapper[4792]: I0121 17:56:43.318206 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.318223 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.318235 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:43Z","lastTransitionTime":"2026-01-21T17:56:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.329432 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.343608 4792 
status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.366500 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecb7f52f90906ca0d0e42d422f99503b4aa7fccd81f50fd1fca6d382885cf444\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ov
nkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.421632 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.421675 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 
17:56:43.421684 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.421704 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.421715 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:43Z","lastTransitionTime":"2026-01-21T17:56:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.524580 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.525246 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.525341 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.525462 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.525548 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:43Z","lastTransitionTime":"2026-01-21T17:56:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.578247 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" event={"ID":"a4a2d61b-a0b0-4073-bd76-665e9fa19250","Type":"ContainerStarted","Data":"3a32de4f393c30583281a4431630dbe23e1420386fbdeed8737595b78d464845"} Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.580999 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8t4xq_10549a02-b482-4bc0-a770-65dbb57f340a/ovnkube-controller/0.log" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.584467 4792 generic.go:334] "Generic (PLEG): container finished" podID="10549a02-b482-4bc0-a770-65dbb57f340a" containerID="ecb7f52f90906ca0d0e42d422f99503b4aa7fccd81f50fd1fca6d382885cf444" exitCode=1 Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.584521 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerDied","Data":"ecb7f52f90906ca0d0e42d422f99503b4aa7fccd81f50fd1fca6d382885cf444"} Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.585538 4792 scope.go:117] "RemoveContainer" containerID="ecb7f52f90906ca0d0e42d422f99503b4aa7fccd81f50fd1fca6d382885cf444" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.600059 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"pod
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.615541 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168
.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.628917 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.628984 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.629058 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.629085 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.629099 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:43Z","lastTransitionTime":"2026-01-21T17:56:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.637826 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecb7f52f90906ca0d0e42d422f99503b4aa7fccd
81f50fd1fca6d382885cf444\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecb7f52f90906ca0d0e42d422f99503b4aa7fccd81f50fd1fca6d382885cf444\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"7 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:56:42.274889 6067 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:56:42.275679 6067 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0121 17:56:42.275929 6067 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:56:42.276632 6067 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0121 17:56:42.276684 6067 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0121 17:56:42.276721 6067 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0121 17:56:42.276756 6067 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0121 17:56:42.276781 6067 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0121 17:56:42.276805 6067 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0121 17:56:42.276946 6067 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0121 17:56:42.277002 6067 factory.go:656] Stopping watch factory\\\\nI0121 17:56:42.277017 6067 ovnkube.go:599] Stopped ovnkube\\\\nI0121 
17:56:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.652477 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b8
9c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.669588 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.683830 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.699240 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4a2d61b-a0b0-4073-bd76-665e9fa19250\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vrcps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.714597 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.720069 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs\") pod \"network-metrics-daemon-ddsqn\" (UID: \"97b1a1f0-3533-44d9-8c10-9feb31d988ea\") " pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:56:43 crc kubenswrapper[4792]: E0121 17:56:43.721111 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:56:43 crc kubenswrapper[4792]: E0121 17:56:43.721156 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs podName:97b1a1f0-3533-44d9-8c10-9feb31d988ea nodeName:}" failed. No retries permitted until 2026-01-21 17:56:44.721142538 +0000 UTC m=+38.703105724 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs") pod "network-metrics-daemon-ddsqn" (UID: "97b1a1f0-3533-44d9-8c10-9feb31d988ea") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.727936 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.733226 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.733272 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.733309 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.733326 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.733374 4792 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:43Z","lastTransitionTime":"2026-01-21T17:56:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.739723 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.751119 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.759538 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97b1a1f0-3533-44d9-8c10-9feb31d988ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ddsqn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc 
kubenswrapper[4792]: I0121 17:56:43.785613 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\
\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Co
mpleted\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.801631 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.818771 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.832436 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.835621 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.835670 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.835680 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.835694 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.835703 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:43Z","lastTransitionTime":"2026-01-21T17:56:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.849048 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.939400 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.939468 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.939482 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.939502 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:43 crc kubenswrapper[4792]: I0121 17:56:43.939517 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:43Z","lastTransitionTime":"2026-01-21T17:56:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.042820 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.042933 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.042954 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.042984 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.043003 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:44Z","lastTransitionTime":"2026-01-21T17:56:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.146690 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.146731 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.146741 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.146756 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.146766 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:44Z","lastTransitionTime":"2026-01-21T17:56:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.224691 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 07:34:06.905988174 +0000 UTC
Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.246858 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:56:44 crc kubenswrapper[4792]: E0121 17:56:44.246993 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.247149 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn"
Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.247206 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:56:44 crc kubenswrapper[4792]: E0121 17:56:44.247335 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea"
Jan 21 17:56:44 crc kubenswrapper[4792]: E0121 17:56:44.247411 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.247643 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:56:44 crc kubenswrapper[4792]: E0121 17:56:44.248097 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
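[editor's note] The certificate_manager lines show a kubelet-serving certificate valid until 2026-02-24 whose rotation deadline (2026-01-08, then 2026-01-09) already lies in the past, so the manager recomputes a jittered deadline on every pass and keeps trying to rotate. A rough approximation of how client-go picks that deadline is sketched below; the exact jitter factors are assumptions, not the verified upstream constants.

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline approximates client-go's certificate manager: pick a
// point at roughly 70% of the certificate's validity window, jittered
// upward by up to 20% of that base. Illustrative only.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(total) * 0.7 * (1 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	notAfter, _ := time.Parse(time.RFC3339, "2026-02-24T05:53:03Z")
	notBefore := notAfter.Add(-90 * 24 * time.Hour) // assumed 90-day lifetime
	fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter))
}

The jitter explains why the two log lines one second apart report different deadlines for the same certificate.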
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.248901 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.248963 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.249305 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.249325 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.249339 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:44Z","lastTransitionTime":"2026-01-21T17:56:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.352030 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.352116 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.352139 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.352167 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.352187 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:44Z","lastTransitionTime":"2026-01-21T17:56:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.455094 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.455137 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.455146 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.455162 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.455172 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:44Z","lastTransitionTime":"2026-01-21T17:56:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.558125 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.558200 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.558213 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.558234 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.558247 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:44Z","lastTransitionTime":"2026-01-21T17:56:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.661456 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.661547 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.661571 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.661602 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.661622 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:44Z","lastTransitionTime":"2026-01-21T17:56:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
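[editor's note] Every "Failed to update status for pod" record in this section fails the same way: the API server forwards the status PATCH to the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743, whose serving certificate expired on 2025-08-24, while the node clock reads 2026-01-21. Go's TLS stack rejects the handshake during chain verification, producing exactly the x509 error quoted in the log. A self-contained sketch reproducing that error class follows; the certificate values are illustrative.

package main

import (
	"crypto/ecdsa"
	"crypto/elliptic"
	"crypto/rand"
	"crypto/x509"
	"crypto/x509/pkix"
	"fmt"
	"math/big"
	"time"
)

func main() {
	// Build a self-signed certificate that expires 2025-08-24T17:21:41Z,
	// mirroring the webhook serving certificate seen in the log.
	key, _ := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	notAfter, _ := time.Parse(time.RFC3339, "2025-08-24T17:21:41Z")
	tmpl := &x509.Certificate{
		SerialNumber:          big.NewInt(1),
		Subject:               pkix.Name{CommonName: "network-node-identity.openshift.io"},
		NotBefore:             notAfter.Add(-365 * 24 * time.Hour),
		NotAfter:              notAfter,
		IsCA:                  true,
		BasicConstraintsValid: true,
	}
	der, _ := x509.CreateCertificate(rand.Reader, tmpl, tmpl, &key.PublicKey, key)
	cert, _ := x509.ParseCertificate(der)

	// Verify it as of the node's current time, 2026-01-21T17:56:43Z.
	roots := x509.NewCertPool()
	roots.AddCert(cert)
	now, _ := time.Parse(time.RFC3339, "2026-01-21T17:56:43Z")
	_, err := cert.Verify(x509.VerifyOptions{Roots: roots, CurrentTime: now})
	fmt.Println(err)
	// Prints: x509: certificate has expired or is not yet valid:
	// current time 2026-01-21T17:56:43Z is after 2025-08-24T17:21:41Z
}

Because the webhook intercepts pod status updates cluster-wide, one expired certificate blocks the status manager for every pod on the node, which is why the same error repeats across etcd, kube-apiserver, multus, and the network operator pods.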
Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.731570 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs\") pod \"network-metrics-daemon-ddsqn\" (UID: \"97b1a1f0-3533-44d9-8c10-9feb31d988ea\") " pod="openshift-multus/network-metrics-daemon-ddsqn"
Jan 21 17:56:44 crc kubenswrapper[4792]: E0121 17:56:44.731961 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 21 17:56:44 crc kubenswrapper[4792]: E0121 17:56:44.732162 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs podName:97b1a1f0-3533-44d9-8c10-9feb31d988ea nodeName:}" failed. No retries permitted until 2026-01-21 17:56:46.732115408 +0000 UTC m=+40.714078794 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs") pod "network-metrics-daemon-ddsqn" (UID: "97b1a1f0-3533-44d9-8c10-9feb31d988ea") : object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.764474 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.764506 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.764516 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.764549 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.764559 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:44Z","lastTransitionTime":"2026-01-21T17:56:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
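[editor's note] The nestedpendingoperations record above shows how the kubelet reschedules a failed MountVolume.SetUp: "not registered" means the metrics-daemon-secret has not yet appeared in the kubelet's informer cache, and the retry is pushed out by a durationBeforeRetry that doubles on repeated failures up to a cap. The sketch below shows that backoff pattern; the 500ms initial delay and 2m2s cap are assumed defaults, not constants verified from this kubelet build.

package main

import (
	"fmt"
	"time"
)

// nextRetry doubles the delay after each failure up to a ceiling, the
// pattern behind "No retries permitted until ... (durationBeforeRetry 2s)".
func nextRetry(prev time.Duration) time.Duration {
	const (
		initial  = 500 * time.Millisecond // assumed initial backoff
		maxDelay = 2*time.Minute + 2*time.Second // assumed cap
	)
	if prev == 0 {
		return initial
	}
	if next := prev * 2; next < maxDelay {
		return next
	}
	return maxDelay
}

func main() {
	var d time.Duration
	for i := 1; i <= 6; i++ {
		d = nextRetry(d)
		fmt.Printf("attempt %d: wait %v\n", i, d)
	}
}

The 2s seen in the log is consistent with a third consecutive failure under this scheme (500ms, 1s, 2s); once the secret registers, the next retry mounts the volume and the backoff resets.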
Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.868102 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.868180 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.868191 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.868211 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.868241 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:44Z","lastTransitionTime":"2026-01-21T17:56:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.971889 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.971955 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.971970 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.971995 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:44 crc kubenswrapper[4792]: I0121 17:56:44.972009 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:44Z","lastTransitionTime":"2026-01-21T17:56:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.074326 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.074367 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.074376 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.074389 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.074398 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:45Z","lastTransitionTime":"2026-01-21T17:56:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.178281 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.178359 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.178382 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.178418 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.178443 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:45Z","lastTransitionTime":"2026-01-21T17:56:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.225509 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 14:09:20.428138226 +0000 UTC
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.281150 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.281177 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.281185 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.281197 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.281207 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:45Z","lastTransitionTime":"2026-01-21T17:56:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
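[editor's note] The "failed to patch status" records throughout this section embed strategic-merge-patch bodies: the $setElementOrder/conditions directive carries the desired ordering of the status.conditions list, and each list element is matched by its "type" key rather than by position, so only changed fields need to appear. The sketch below builds a patch of that shape with the standard library only; the endpoint URL is hypothetical, and the kubelet actually sends this through client-go rather than a bare HTTP client.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Patch shaped like the bodies in the log: $setElementOrder pins the
	// merge order of status.conditions; entries merge by their "type" key.
	patch := map[string]any{
		"status": map[string]any{
			"$setElementOrder/conditions": []map[string]string{
				{"type": "PodReadyToStartContainers"}, {"type": "Initialized"},
				{"type": "Ready"}, {"type": "ContainersReady"}, {"type": "PodScheduled"},
			},
			"conditions": []map[string]any{
				{"type": "Ready", "status": "True", "lastTransitionTime": "2026-01-21T17:56:26Z"},
			},
		},
	}
	body, _ := json.Marshal(patch)

	// Hypothetical API server endpoint; the webhook in the log intercepts
	// this PATCH on the server side before it reaches etcd.
	req, _ := http.NewRequest(http.MethodPatch,
		"https://api.example:6443/api/v1/namespaces/openshift-etcd/pods/etcd-crc/status",
		bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/strategic-merge-patch+json")
	fmt.Println(req.Method, req.URL.Path, "->", string(body))
}

Because the webhook failure happens during admission of this PATCH, the pod's actual containers keep running; only the recorded status in the API server goes stale until the webhook certificate is rotated.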
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.384343 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.384439 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.384463 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.384499 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.384531 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:45Z","lastTransitionTime":"2026-01-21T17:56:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.489390 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.489462 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.489496 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.489523 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.489539 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:45Z","lastTransitionTime":"2026-01-21T17:56:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.592146 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.592205 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.592240 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.592259 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.592273 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:45Z","lastTransitionTime":"2026-01-21T17:56:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.600762 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8t4xq_10549a02-b482-4bc0-a770-65dbb57f340a/ovnkube-controller/0.log" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.603912 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerStarted","Data":"b341d5797f6ee8e6c25b3e811910f6aee111ceca0e6cc82b6109fddf5b2b2b78"} Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.604148 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.607418 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" event={"ID":"a4a2d61b-a0b0-4073-bd76-665e9fa19250","Type":"ContainerStarted","Data":"d36f12319da77e736cc4fd4803c0186c2821e25764a9b8bbb41e137b95b6251f"} Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.627426 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:45Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.650340 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:45Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.668808 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:45Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.686289 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:45Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.694787 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.694834 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.694860 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.694882 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.694894 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:45Z","lastTransitionTime":"2026-01-21T17:56:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.703530 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97b1a1f0-3533-44d9-8c10-9feb31d988ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ddsqn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:45Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.720091 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:45Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.735496 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:45Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.755509 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:45Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.774889 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:45Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.797475 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.797545 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:45 crc 
kubenswrapper[4792]: I0121 17:56:45.797556 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.797578 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.797593 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:45Z","lastTransitionTime":"2026-01-21T17:56:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.805207 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mou
ntPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:45Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.822208 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:45Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.836604 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:45Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.861073 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b341d5797f6ee8e6c25b3e811910f6aee111ceca0e6cc82b6109fddf5b2b2b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecb7f52f90906ca0d0e42d422f99503b4aa7fccd81f50fd1fca6d382885cf444\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"7 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:56:42.274889 6067 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:56:42.275679 6067 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0121 17:56:42.275929 6067 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:56:42.276632 6067 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0121 17:56:42.276684 6067 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0121 17:56:42.276721 6067 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0121 17:56:42.276756 6067 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0121 17:56:42.276781 6067 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0121 17:56:42.276805 6067 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0121 17:56:42.276946 6067 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0121 17:56:42.277002 6067 factory.go:656] Stopping watch factory\\\\nI0121 17:56:42.277017 6067 ovnkube.go:599] Stopped ovnkube\\\\nI0121 
17:56:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:45Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.880936 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:45Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.895957 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:45Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.899892 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.899925 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.899934 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.899950 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.899960 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:45Z","lastTransitionTime":"2026-01-21T17:56:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.915039 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4a2d61b-a0b0-4073-bd76-665e9fa19250\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vrcps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:45Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.933179 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:45Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.944681 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:45Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.957260 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11
\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:45Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.976679 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b341d5797f6ee8e6c25b3e811910f6aee111ceca
0e6cc82b6109fddf5b2b2b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecb7f52f90906ca0d0e42d422f99503b4aa7fccd81f50fd1fca6d382885cf444\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"7 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:56:42.274889 6067 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:56:42.275679 6067 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0121 17:56:42.275929 6067 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:56:42.276632 6067 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0121 17:56:42.276684 6067 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0121 17:56:42.276721 6067 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0121 17:56:42.276756 6067 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0121 17:56:42.276781 6067 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0121 17:56:42.276805 6067 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0121 17:56:42.276946 6067 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0121 17:56:42.277002 6067 factory.go:656] Stopping watch factory\\\\nI0121 17:56:42.277017 6067 ovnkube.go:599] Stopped ovnkube\\\\nI0121 
17:56:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:45Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:45 crc kubenswrapper[4792]: I0121 17:56:45.989409 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:45Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.001935 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:45Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.002540 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.002574 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.002587 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.002612 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.002625 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:46Z","lastTransitionTime":"2026-01-21T17:56:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.015305 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4a2d61b-a0b0-4073-bd76-665e9fa19250\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a32de4f393c30583281a4431630dbe23e1420386fbdeed8737595b78d464845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36f12319da77e736cc4fd4803c0186c2821e25764a9b8bbb41e137b
95b6251f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vrcps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.029198 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"ima
ge\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.048948 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.065326 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.079179 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.093748 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.105713 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97b1a1f0-3533-44d9-8c10-9feb31d988ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ddsqn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc 
kubenswrapper[4792]: I0121 17:56:46.105919 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.105945 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.105957 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.105976 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.105989 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:46Z","lastTransitionTime":"2026-01-21T17:56:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.119593 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"
lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-
21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.134781 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.150595 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.166928 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.186653 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kube
rnetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\
\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.208476 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.208541 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.208551 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.208574 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.208588 4792 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:46Z","lastTransitionTime":"2026-01-21T17:56:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.225668 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 01:10:20.049380576 +0000 UTC Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.246224 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.246345 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.246423 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.246359 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:46 crc kubenswrapper[4792]: E0121 17:56:46.246473 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:56:46 crc kubenswrapper[4792]: E0121 17:56:46.246604 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:56:46 crc kubenswrapper[4792]: E0121 17:56:46.246737 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:56:46 crc kubenswrapper[4792]: E0121 17:56:46.246876 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.268587 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.284388 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.300673 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.311214 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.311252 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.311263 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.311277 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.311289 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:46Z","lastTransitionTime":"2026-01-21T17:56:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.314090 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.325926 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97b1a1f0-3533-44d9-8c10-9feb31d988ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ddsqn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.349130 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.362975 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.376197 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.390625 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.409506 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f
3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.413882 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.413947 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.413965 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.413994 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.414012 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:46Z","lastTransitionTime":"2026-01-21T17:56:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.420765 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.440754 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.461769 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b341d5797f6ee8e6c25b3e811910f6aee111ceca0e6cc82b6109fddf5b2b2b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecb7f52f90906ca0d0e42d422f99503b4aa7fccd81f50fd1fca6d382885cf444\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"7 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:56:42.274889 6067 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:56:42.275679 6067 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0121 17:56:42.275929 6067 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:56:42.276632 6067 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0121 17:56:42.276684 6067 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0121 17:56:42.276721 6067 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0121 17:56:42.276756 6067 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0121 17:56:42.276781 6067 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0121 17:56:42.276805 6067 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0121 17:56:42.276946 6067 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0121 17:56:42.277002 6067 factory.go:656] Stopping watch factory\\\\nI0121 17:56:42.277017 6067 ovnkube.go:599] Stopped ovnkube\\\\nI0121 
17:56:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.477680 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.493877 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.508305 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4a2d61b-a0b0-4073-bd76-665e9fa19250\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a32de4f393c30583281a4431630dbe23e1420386fbdeed8737595b78d464845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36f12319da77e736cc4fd4803c0186c2821e25764a9b8bbb41e137b95b6251f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:41Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vrcps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.516807 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.516896 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.516917 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.516943 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.516958 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:46Z","lastTransitionTime":"2026-01-21T17:56:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.524547 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.619685 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.619752 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.619772 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.619796 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.619818 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:46Z","lastTransitionTime":"2026-01-21T17:56:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.723696 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.723747 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.723756 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.723774 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.723783 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:46Z","lastTransitionTime":"2026-01-21T17:56:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.765487 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs\") pod \"network-metrics-daemon-ddsqn\" (UID: \"97b1a1f0-3533-44d9-8c10-9feb31d988ea\") " pod="openshift-multus/network-metrics-daemon-ddsqn"
Jan 21 17:56:46 crc kubenswrapper[4792]: E0121 17:56:46.765716 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 21 17:56:46 crc kubenswrapper[4792]: E0121 17:56:46.765873 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs podName:97b1a1f0-3533-44d9-8c10-9feb31d988ea nodeName:}" failed. No retries permitted until 2026-01-21 17:56:50.765825011 +0000 UTC m=+44.747788197 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs") pod "network-metrics-daemon-ddsqn" (UID: "97b1a1f0-3533-44d9-8c10-9feb31d988ea") : object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.825726 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.825781 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.825793 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.825807 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.825817 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:46Z","lastTransitionTime":"2026-01-21T17:56:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.928431 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.928477 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.928488 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.928504 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:46 crc kubenswrapper[4792]: I0121 17:56:46.928515 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:46Z","lastTransitionTime":"2026-01-21T17:56:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.031055 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.031092 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.031102 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.031117 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.031129 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:47Z","lastTransitionTime":"2026-01-21T17:56:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.133493 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.133523 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.133535 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.133567 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.133576 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:47Z","lastTransitionTime":"2026-01-21T17:56:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.226552 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 22:09:38.095420797 +0000 UTC
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.236654 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.236695 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.236708 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.236725 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.236737 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:47Z","lastTransitionTime":"2026-01-21T17:56:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.339673 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.339710 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.339719 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.339736 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.339746 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:47Z","lastTransitionTime":"2026-01-21T17:56:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.442765 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.442809 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.442832 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.442916 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.442935 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:47Z","lastTransitionTime":"2026-01-21T17:56:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.545191 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.545221 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.545230 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.545243 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.545252 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:47Z","lastTransitionTime":"2026-01-21T17:56:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.618217 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8t4xq_10549a02-b482-4bc0-a770-65dbb57f340a/ovnkube-controller/1.log"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.619247 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8t4xq_10549a02-b482-4bc0-a770-65dbb57f340a/ovnkube-controller/0.log"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.622584 4792 generic.go:334] "Generic (PLEG): container finished" podID="10549a02-b482-4bc0-a770-65dbb57f340a" containerID="b341d5797f6ee8e6c25b3e811910f6aee111ceca0e6cc82b6109fddf5b2b2b78" exitCode=1
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.622693 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerDied","Data":"b341d5797f6ee8e6c25b3e811910f6aee111ceca0e6cc82b6109fddf5b2b2b78"}
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.622779 4792 scope.go:117] "RemoveContainer" containerID="ecb7f52f90906ca0d0e42d422f99503b4aa7fccd81f50fd1fca6d382885cf444"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.623904 4792 scope.go:117] "RemoveContainer" containerID="b341d5797f6ee8e6c25b3e811910f6aee111ceca0e6cc82b6109fddf5b2b2b78"
Jan 21 17:56:47 crc kubenswrapper[4792]: E0121 17:56:47.624476 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-8t4xq_openshift-ovn-kubernetes(10549a02-b482-4bc0-a770-65dbb57f340a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" podUID="10549a02-b482-4bc0-a770-65dbb57f340a"
Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.637252 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.647988 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.648120 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.648200 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.648276 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.648357 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:47Z","lastTransitionTime":"2026-01-21T17:56:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.653008 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.688801 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/va
r/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3e
f3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b341d5797f6ee8e6c25b3e811910f6aee111ceca0e6cc82b6109fddf5b2b2b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecb7f52f90906ca0d0e42d422f99503b4aa7fccd81f50fd1fca6d382885cf444\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"7 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:56:42.274889 6067 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:56:42.275679 6067 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0121 17:56:42.275929 6067 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:56:42.276632 6067 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0121 17:56:42.276684 6067 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0121 17:56:42.276721 6067 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0121 17:56:42.276756 6067 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0121 17:56:42.276781 6067 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0121 17:56:42.276805 6067 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0121 17:56:42.276946 6067 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0121 17:56:42.277002 6067 factory.go:656] Stopping watch factory\\\\nI0121 17:56:42.277017 6067 ovnkube.go:599] Stopped ovnkube\\\\nI0121 
17:56:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b341d5797f6ee8e6c25b3e811910f6aee111ceca0e6cc82b6109fddf5b2b2b78\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:56:46Z\\\",\\\"message\\\":\\\"tart default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z]\\\\nI0121 17:56:46.315265 6232 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-m5d6x in node crc\\\\nI0121 17:56:46.315272 6232 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-m5d6x after 0 failed attempt(s)\\\\nI0121 17:56:46.315284 6232 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-m5d6x\\\\nI0121 17:56:46.315270 6232 obj_retry.go:285] Attempting retry of *v1.Pod openshift-multus/network-metrics-daemon-ddsqn before timer (time: 2026-01-21 
17:56:47.5370\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cd
d47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.709746 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d347
20243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.725683 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.739719 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.750806 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.751035 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.751116 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.751206 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.751267 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:47Z","lastTransitionTime":"2026-01-21T17:56:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.755957 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4a2d61b-a0b0-4073-bd76-665e9fa19250\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a32de4f393c30583281a4431630dbe23e1420386fbdeed8737595b78d464845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36f12319da77e736cc4fd4803c0186c2821e25764a9b8bbb41e137b95b6251f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:41Z\\\"}}\" 
for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vrcps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.779024 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.794558 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.810559 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.832066 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.847672 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97b1a1f0-3533-44d9-8c10-9feb31d988ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ddsqn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:47 crc 
kubenswrapper[4792]: I0121 17:56:47.854695 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.854782 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.854798 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.854823 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.854840 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:47Z","lastTransitionTime":"2026-01-21T17:56:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.882235 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54
b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\
\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.904823 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.925594 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.945806 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.958321 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.958363 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.958377 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.958433 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.958446 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:47Z","lastTransitionTime":"2026-01-21T17:56:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:47 crc kubenswrapper[4792]: I0121 17:56:47.964964 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.061485 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.061538 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.061554 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.061580 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.061596 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:48Z","lastTransitionTime":"2026-01-21T17:56:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.165937 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.165995 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.166006 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.166023 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.166039 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:48Z","lastTransitionTime":"2026-01-21T17:56:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.227972 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 05:40:40.114211179 +0000 UTC Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.246030 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.246023 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.246116 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:56:48 crc kubenswrapper[4792]: E0121 17:56:48.246596 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:56:48 crc kubenswrapper[4792]: E0121 17:56:48.246728 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:56:48 crc kubenswrapper[4792]: E0121 17:56:48.246803 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.246330 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:56:48 crc kubenswrapper[4792]: E0121 17:56:48.247538 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.269652 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.269705 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.269721 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.269747 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.269769 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:48Z","lastTransitionTime":"2026-01-21T17:56:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.373120 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.373580 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.373785 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.374056 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.374313 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:48Z","lastTransitionTime":"2026-01-21T17:56:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.478534 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.479022 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.479405 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.479823 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.480092 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:48Z","lastTransitionTime":"2026-01-21T17:56:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.583662 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.584292 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.584382 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.584505 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.584585 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:48Z","lastTransitionTime":"2026-01-21T17:56:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.687751 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.687793 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.687812 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.687829 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.687842 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:48Z","lastTransitionTime":"2026-01-21T17:56:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.791732 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.791794 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.791816 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.791878 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.791896 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:48Z","lastTransitionTime":"2026-01-21T17:56:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.895206 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.895274 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.895286 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.895311 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.895396 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:48Z","lastTransitionTime":"2026-01-21T17:56:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.998969 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.999025 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.999038 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.999060 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:48 crc kubenswrapper[4792]: I0121 17:56:48.999077 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:48Z","lastTransitionTime":"2026-01-21T17:56:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.101563 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.101627 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.101639 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.101655 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.101666 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:49Z","lastTransitionTime":"2026-01-21T17:56:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.204545 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.204597 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.204609 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.204624 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.204634 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:49Z","lastTransitionTime":"2026-01-21T17:56:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.228551 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 19:28:09.306893023 +0000 UTC Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.307491 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.307530 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.307544 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.307563 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.307574 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:49Z","lastTransitionTime":"2026-01-21T17:56:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.410577 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.410624 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.410635 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.410659 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.410673 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:49Z","lastTransitionTime":"2026-01-21T17:56:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.513349 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.513398 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.513410 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.513432 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.513454 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:49Z","lastTransitionTime":"2026-01-21T17:56:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.617232 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.617461 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.617488 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.617516 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.617537 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:49Z","lastTransitionTime":"2026-01-21T17:56:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.640528 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8t4xq_10549a02-b482-4bc0-a770-65dbb57f340a/ovnkube-controller/1.log" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.722089 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.722493 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.722626 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.722764 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.722940 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:49Z","lastTransitionTime":"2026-01-21T17:56:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.827193 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.827602 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.827717 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.827812 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.827905 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:49Z","lastTransitionTime":"2026-01-21T17:56:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.939019 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.939092 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.939109 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.939127 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:49 crc kubenswrapper[4792]: I0121 17:56:49.939140 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:49Z","lastTransitionTime":"2026-01-21T17:56:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.042795 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.042836 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.042873 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.042890 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.042901 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:50Z","lastTransitionTime":"2026-01-21T17:56:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.146694 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.146766 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.146782 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.146805 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.146817 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:50Z","lastTransitionTime":"2026-01-21T17:56:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.228991 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 06:08:26.228469003 +0000 UTC Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.246577 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.246670 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.246705 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.246886 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:56:50 crc kubenswrapper[4792]: E0121 17:56:50.246841 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:56:50 crc kubenswrapper[4792]: E0121 17:56:50.247204 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:56:50 crc kubenswrapper[4792]: E0121 17:56:50.247268 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:56:50 crc kubenswrapper[4792]: E0121 17:56:50.247356 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
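Behind every NodeNotReady entry above is the same readiness test: the container runtime reports NetworkReady=false until a CNI network configuration exists on disk, and the kubelet republishes the Ready=False node condition on each status sync until that changes. A minimal sketch of the check, assuming the libcni convention of scanning the conf directory for .conf, .conflist, or .json files (an illustration of the test, not the CRI-O source):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether any CNI network config is present,
// using the file extensions libcni conventionally loads.
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	// Prints "false <nil>" (or an error) until the network plugin
	// writes its config file into the directory.
	fmt.Println(ok, err)
}

On this node the directory presumably stays empty until the OVN-Kubernetes node pod (the ovnkube-node-8t4xq whose log is parsed at 17:56:49.640528 above) comes up and writes its config; once it does, NetworkReady should flip to true and these repeated NodeNotReady entries stop.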
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.251138 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.251172 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.251182 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.251199 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.251210 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:50Z","lastTransitionTime":"2026-01-21T17:56:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.354012 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.354096 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.354107 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.354134 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.354145 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:50Z","lastTransitionTime":"2026-01-21T17:56:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.457029 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.457075 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.457085 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.457103 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.457113 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:50Z","lastTransitionTime":"2026-01-21T17:56:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.560585 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.560633 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.560646 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.560662 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.560672 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:50Z","lastTransitionTime":"2026-01-21T17:56:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.663331 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.663679 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.663746 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.663811 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.663893 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:50Z","lastTransitionTime":"2026-01-21T17:56:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.766109 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.766145 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.766153 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.766169 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.766179 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:50Z","lastTransitionTime":"2026-01-21T17:56:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.812927 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs\") pod \"network-metrics-daemon-ddsqn\" (UID: \"97b1a1f0-3533-44d9-8c10-9feb31d988ea\") " pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:56:50 crc kubenswrapper[4792]: E0121 17:56:50.813067 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:56:50 crc kubenswrapper[4792]: E0121 17:56:50.813133 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs podName:97b1a1f0-3533-44d9-8c10-9feb31d988ea nodeName:}" failed. No retries permitted until 2026-01-21 17:56:58.813115086 +0000 UTC m=+52.795078282 (durationBeforeRetry 8s). 
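The nestedpendingoperations notice just above throttles the failed secret mount with exponential backoff: the underlying problem is that the secret "openshift-multus"/"metrics-daemon-secret" is not yet registered with the kubelet, and each failed MountVolume attempt roughly doubles the wait before the next one, which is how the retry delay reached 8s (next attempt pushed to 17:56:58). A toy sketch of that policy; the 500ms base, factor of 2, and cap are assumptions chosen to reproduce the logged 8s step, not constants read from kubelet source:

package main

import (
	"fmt"
	"time"
)

// backoff doubles the delay after every failure, up to a cap.
type backoff struct {
	initial, max, current time.Duration
}

func (b *backoff) next() time.Duration {
	switch {
	case b.current == 0:
		b.current = b.initial
	case b.current*2 > b.max:
		b.current = b.max
	default:
		b.current *= 2
	}
	return b.current
}

func main() {
	b := &backoff{initial: 500 * time.Millisecond, max: 2 * time.Minute}
	for i := 1; i <= 6; i++ {
		// 500ms, 1s, 2s, 4s, 8s, 16s; the log above is at the 8s step.
		fmt.Printf("attempt %d: wait %v\n", i, b.next())
	}
}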
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs") pod "network-metrics-daemon-ddsqn" (UID: "97b1a1f0-3533-44d9-8c10-9feb31d988ea") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.869216 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.869261 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.869276 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.869299 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.869316 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:50Z","lastTransitionTime":"2026-01-21T17:56:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.973592 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.973658 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.973677 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.973708 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:50 crc kubenswrapper[4792]: I0121 17:56:50.973729 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:50Z","lastTransitionTime":"2026-01-21T17:56:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 21 17:56:51 crc kubenswrapper[4792]: I0121 17:56:51.076300 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:51 crc kubenswrapper[4792]: I0121 17:56:51.076335 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:51 crc kubenswrapper[4792]: I0121 17:56:51.076347 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:51 crc kubenswrapper[4792]: I0121 17:56:51.076363 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:51 crc kubenswrapper[4792]: I0121 17:56:51.076374 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:51Z","lastTransitionTime":"2026-01-21T17:56:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[... identical five-line status block repeated at 17:56:51.179 ...]
Jan 21 17:56:51 crc kubenswrapper[4792]: I0121 17:56:51.229972 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 08:44:20.699239394 +0000 UTC
[... identical five-line status block repeated at roughly 100 ms intervals: 17:56:51.282, 17:56:51.386, 17:56:51.489, 17:56:51.591, 17:56:51.695, 17:56:51.799, 17:56:51.903, 17:56:52.006, 17:56:52.109 ...]
Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.209246 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.209319 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.209330 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.209368 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.209383 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:52Z","lastTransitionTime":"2026-01-21T17:56:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:52 crc kubenswrapper[4792]: E0121 17:56:52.223046 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:52Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.230753 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 23:17:27.665630015 +0000 UTC Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.233885 4792 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.234217 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.234503 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.234590 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.234693 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:52Z","lastTransitionTime":"2026-01-21T17:56:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.245685 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn"
Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.245831 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:56:52 crc kubenswrapper[4792]: E0121 17:56:52.245830 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea"
Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.245886 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:56:52 crc kubenswrapper[4792]: E0121 17:56:52.245966 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.246156 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:56:52 crc kubenswrapper[4792]: E0121 17:56:52.246190 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 21 17:56:52 crc kubenswrapper[4792]: E0121 17:56:52.246454 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 21 17:56:52 crc kubenswrapper[4792]: E0121 17:56:52.249105 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [... payload identical to the 17:56:52.223046 attempt above ...] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:52Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.252685 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.252723 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
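Every not-ready heartbeat and every skipped pod sync in this excerpt traces back to one condition: the container runtime reports NetworkReady=false because no CNI configuration file exists in /etc/kubernetes/cni/net.d/ yet. The sketch below is a minimal standalone illustration of that readiness test, not the actual libcni code; only the directory path is taken from the log messages.

// cnicheck.go -- illustrative only: mirrors the "no CNI configuration file"
// condition reported throughout this log. Not the real libcni implementation.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// confDir is taken verbatim from the kubelet messages above.
const confDir = "/etc/kubernetes/cni/net.d"

// cniConfigPresent reports whether at least one CNI network config exists.
func cniConfigPresent(dir string) bool {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false // a missing directory also means "plugin not ready"
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true
		}
	}
	return false
}

func main() {
	if cniConfigPresent(confDir) {
		fmt.Println("NetworkReady=true")
	} else {
		fmt.Println("NetworkReady=false: no CNI configuration file in", confDir)
	}
}

Once the network operator writes a config file into that directory, the runtime should flip NetworkReady to true and the kubelet should stop emitting the NodeNotReady condition.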
event="NodeHasNoDiskPressure" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.252734 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.252751 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.252762 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:52Z","lastTransitionTime":"2026-01-21T17:56:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:52 crc kubenswrapper[4792]: E0121 17:56:52.263600 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:52Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.266816 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.266857 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
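The repeated patch failures are not caused by the CNI condition carried inside the payload; they are a TLS problem. The API server cannot deliver the node-status patch because the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 serves a certificate that expired on 2025-08-24, while the node's clock reads 2026-01-21. A hedged diagnostic sketch (not an OpenShift tool; only the address is taken from the error text) that dials the endpoint and prints the leaf certificate's validity window:

// certwindow.go -- illustrative diagnostic: reproduces the x509 validity
// check that fails above with "certificate has expired or is not yet valid".
package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Endpoint taken from the failing webhook POST in the log.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // inspect the certificate even though it is expired
	})
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	certs := conn.ConnectionState().PeerCertificates
	if len(certs) == 0 {
		log.Fatal("no peer certificate presented")
	}
	leaf, now := certs[0], time.Now()
	fmt.Printf("NotBefore: %s\nNotAfter:  %s\n", leaf.NotBefore, leaf.NotAfter)
	if now.Before(leaf.NotBefore) || now.After(leaf.NotAfter) {
		fmt.Printf("invalid at %s -- matches the kubelet error above\n", now.UTC().Format(time.RFC3339))
	}
}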
event="NodeHasNoDiskPressure" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.266866 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.266878 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.266888 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:52Z","lastTransitionTime":"2026-01-21T17:56:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:52 crc kubenswrapper[4792]: E0121 17:56:52.280660 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:52Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.285792 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.285829 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.285840 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.285870 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.285883 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:52Z","lastTransitionTime":"2026-01-21T17:56:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:52 crc kubenswrapper[4792]: E0121 17:56:52.299342 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:52Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:52 crc kubenswrapper[4792]: E0121 17:56:52.299462 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.301614 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.301652 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.301662 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.301687 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.301708 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:52Z","lastTransitionTime":"2026-01-21T17:56:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.404432 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.404515 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.404532 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.404560 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.404577 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:52Z","lastTransitionTime":"2026-01-21T17:56:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.507928 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.507996 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.508015 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.508045 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.508095 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:52Z","lastTransitionTime":"2026-01-21T17:56:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.611515 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.611545 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.611556 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.611571 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.611581 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:52Z","lastTransitionTime":"2026-01-21T17:56:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.714584 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.715094 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.715213 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.715309 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.715388 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:52Z","lastTransitionTime":"2026-01-21T17:56:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.819636 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.819694 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.819710 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.819739 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.819761 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:52Z","lastTransitionTime":"2026-01-21T17:56:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.923907 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.923983 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.924002 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.924024 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:52 crc kubenswrapper[4792]: I0121 17:56:52.924036 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:52Z","lastTransitionTime":"2026-01-21T17:56:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.026453 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.026496 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.026505 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.026520 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.026533 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:53Z","lastTransitionTime":"2026-01-21T17:56:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.129746 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.129811 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.129827 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.129879 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.129897 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:53Z","lastTransitionTime":"2026-01-21T17:56:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.231732 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 20:00:42.041211364 +0000 UTC Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.234366 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.234423 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.234437 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.234458 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.234471 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:53Z","lastTransitionTime":"2026-01-21T17:56:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.337475 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.337564 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.337578 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.337597 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.337607 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:53Z","lastTransitionTime":"2026-01-21T17:56:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.440537 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.440622 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.440635 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.440652 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.440663 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:53Z","lastTransitionTime":"2026-01-21T17:56:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.542683 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.542728 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.542738 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.542757 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.542767 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:53Z","lastTransitionTime":"2026-01-21T17:56:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.646031 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.646093 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.646111 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.646133 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.646147 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:53Z","lastTransitionTime":"2026-01-21T17:56:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.748412 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.748463 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.748475 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.748493 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.748507 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:53Z","lastTransitionTime":"2026-01-21T17:56:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.851609 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.851647 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.851657 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.851673 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.851684 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:53Z","lastTransitionTime":"2026-01-21T17:56:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.954400 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.954466 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.954479 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.954501 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:53 crc kubenswrapper[4792]: I0121 17:56:53.954514 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:53Z","lastTransitionTime":"2026-01-21T17:56:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.057566 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.057639 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.057650 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.057665 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.057676 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:54Z","lastTransitionTime":"2026-01-21T17:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.161031 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.161099 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.161110 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.161133 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.161151 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:54Z","lastTransitionTime":"2026-01-21T17:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.232357 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 09:28:50.900564489 +0000 UTC Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.245767 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.245872 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.245806 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.245767 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:54 crc kubenswrapper[4792]: E0121 17:56:54.245986 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:56:54 crc kubenswrapper[4792]: E0121 17:56:54.246128 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:56:54 crc kubenswrapper[4792]: E0121 17:56:54.246335 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:56:54 crc kubenswrapper[4792]: E0121 17:56:54.246430 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.262912 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.262968 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.262983 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.263004 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.263021 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:54Z","lastTransitionTime":"2026-01-21T17:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.366105 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.366421 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.366519 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.366592 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.366656 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:54Z","lastTransitionTime":"2026-01-21T17:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.469897 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.469944 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.469977 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.469996 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.470005 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:54Z","lastTransitionTime":"2026-01-21T17:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.574192 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.574266 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.574284 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.574312 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.574333 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:54Z","lastTransitionTime":"2026-01-21T17:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.678610 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.678697 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.678723 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.678763 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.678790 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:54Z","lastTransitionTime":"2026-01-21T17:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.782730 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.783285 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.783561 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.783764 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.784593 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:54Z","lastTransitionTime":"2026-01-21T17:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.888513 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.888613 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.888643 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.888686 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.888714 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:54Z","lastTransitionTime":"2026-01-21T17:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.992271 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.992330 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.992342 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.992364 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:54 crc kubenswrapper[4792]: I0121 17:56:54.992378 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:54Z","lastTransitionTime":"2026-01-21T17:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.096367 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.096433 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.096443 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.096496 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.096513 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:55Z","lastTransitionTime":"2026-01-21T17:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.199698 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.199756 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.199771 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.199794 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.199808 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:55Z","lastTransitionTime":"2026-01-21T17:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.232921 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 03:16:38.38432742 +0000 UTC
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.303505 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.303555 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.303565 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.303583 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.303591 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:55Z","lastTransitionTime":"2026-01-21T17:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.406938 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.407454 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.407599 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.407756 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.407932 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:55Z","lastTransitionTime":"2026-01-21T17:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.511953 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.512598 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.512811 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.513503 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.513908 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:55Z","lastTransitionTime":"2026-01-21T17:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.617780 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.617887 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.617907 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.617939 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.617959 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:55Z","lastTransitionTime":"2026-01-21T17:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.721346 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.721403 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.721419 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.721437 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.721453 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:55Z","lastTransitionTime":"2026-01-21T17:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.824717 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.824774 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.824790 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.824842 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.824879 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:55Z","lastTransitionTime":"2026-01-21T17:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.928716 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.928781 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.928795 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.928819 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:55 crc kubenswrapper[4792]: I0121 17:56:55.928835 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:55Z","lastTransitionTime":"2026-01-21T17:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.032670 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.032729 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.032750 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.032774 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.032790 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:56Z","lastTransitionTime":"2026-01-21T17:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.136968 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.137532 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.137650 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.137770 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.137895 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:56Z","lastTransitionTime":"2026-01-21T17:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.234563 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 05:28:26.895360229 +0000 UTC
Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.240422 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.240460 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.240468 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.240483 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.240493 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:56Z","lastTransitionTime":"2026-01-21T17:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.246141 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.246202 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.246282 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:56:56 crc kubenswrapper[4792]: E0121 17:56:56.246367 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.246464 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn"
Jan 21 17:56:56 crc kubenswrapper[4792]: E0121 17:56:56.246512 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 21 17:56:56 crc kubenswrapper[4792]: E0121 17:56:56.246687 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 21 17:56:56 crc kubenswrapper[4792]: E0121 17:56:56.246952 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.258464 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.270757 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.290133 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b341d5797f6ee8e6c25b3e811910f6aee111ceca0e6cc82b6109fddf5b2b2b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ecb7f52f90906ca0d0e42d422f99503b4aa7fccd81f50fd1fca6d382885cf444\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"7 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:56:42.274889 6067 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:56:42.275679 6067 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0121 17:56:42.275929 6067 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:56:42.276632 6067 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0121 17:56:42.276684 6067 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0121 17:56:42.276721 6067 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0121 17:56:42.276756 6067 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0121 17:56:42.276781 6067 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0121 17:56:42.276805 6067 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0121 17:56:42.276946 6067 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0121 17:56:42.277002 6067 factory.go:656] Stopping watch factory\\\\nI0121 17:56:42.277017 6067 ovnkube.go:599] Stopped ovnkube\\\\nI0121 17:56:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b341d5797f6ee8e6c25b3e811910f6aee111ceca0e6cc82b6109fddf5b2b2b78\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:56:46Z\\\",\\\"message\\\":\\\"tart default network controller: unable to create admin network policy 
controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z]\\\\nI0121 17:56:46.315265 6232 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-m5d6x in node crc\\\\nI0121 17:56:46.315272 6232 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-m5d6x after 0 failed attempt(s)\\\\nI0121 17:56:46.315284 6232 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-m5d6x\\\\nI0121 17:56:46.315270 6232 obj_retry.go:285] Attempting retry of *v1.Pod openshift-multus/network-metrics-daemon-ddsqn before timer (time: 2026-01-21 17:56:47.5370\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.305090 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.328045 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.340445 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.342525 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.342556 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.342566 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.342586 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.342602 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:56Z","lastTransitionTime":"2026-01-21T17:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.356566 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4a2d61b-a0b0-4073-bd76-665e9fa19250\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a32de4f393c30583281a4431630dbe23e1420386fbdeed8737595b78d464845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36f12319da77e736cc4fd4803c0186c2821e25764a9b8bbb41e137b
95b6251f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vrcps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.368415 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97b1a1f0-3533-44d9-8c10-9feb31d988ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ddsqn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.380660 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.392379 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.403658 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.416615 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.435257 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.445868 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.445923 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.445936 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.445957 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.445971 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:56Z","lastTransitionTime":"2026-01-21T17:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.449064 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:56Z is after 2025-08-24T17:21:41Z"
Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.461708 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.477183 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.501552 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.550488 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.550575 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:56 crc 
kubenswrapper[4792]: I0121 17:56:56.550593 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.550626 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.550645 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:56Z","lastTransitionTime":"2026-01-21T17:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.653935 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.654035 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.654047 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.654066 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.654078 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:56Z","lastTransitionTime":"2026-01-21T17:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.757731 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.757820 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.757834 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.757867 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.757883 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:56Z","lastTransitionTime":"2026-01-21T17:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.860588 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.860652 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.860662 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.860687 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.860700 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:56Z","lastTransitionTime":"2026-01-21T17:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.965213 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.965286 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.965313 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.965346 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:56 crc kubenswrapper[4792]: I0121 17:56:56.965367 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:56Z","lastTransitionTime":"2026-01-21T17:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.068801 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.068878 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.068890 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.068911 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.068925 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:57Z","lastTransitionTime":"2026-01-21T17:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.171628 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.171660 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.171669 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.171683 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.171693 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:57Z","lastTransitionTime":"2026-01-21T17:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.180589 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.180720 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.180763 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:56:57 crc kubenswrapper[4792]: E0121 17:56:57.180823 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:57:29.180789301 +0000 UTC m=+83.162752487 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:56:57 crc kubenswrapper[4792]: E0121 17:56:57.180920 4792 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:56:57 crc kubenswrapper[4792]: E0121 17:56:57.181009 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:57:29.180985886 +0000 UTC m=+83.162949242 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:56:57 crc kubenswrapper[4792]: E0121 17:56:57.181097 4792 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:56:57 crc kubenswrapper[4792]: E0121 17:56:57.181142 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:57:29.18113317 +0000 UTC m=+83.163096356 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.235066 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 10:22:39.481577124 +0000 UTC Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.273820 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.273880 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.273889 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.273902 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.273911 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:57Z","lastTransitionTime":"2026-01-21T17:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.281569 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.281599 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:56:57 crc kubenswrapper[4792]: E0121 17:56:57.281708 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:56:57 crc kubenswrapper[4792]: E0121 17:56:57.281727 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:56:57 crc kubenswrapper[4792]: E0121 17:56:57.281725 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:56:57 crc kubenswrapper[4792]: E0121 17:56:57.281737 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod 
Jan 21 17:56:57 crc kubenswrapper[4792]: E0121 17:56:57.281745 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 21 17:56:57 crc kubenswrapper[4792]: E0121 17:56:57.281753 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 21 17:56:57 crc kubenswrapper[4792]: E0121 17:56:57.281782 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-21 17:57:29.281770196 +0000 UTC m=+83.263733382 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 21 17:56:57 crc kubenswrapper[4792]: E0121 17:56:57.281796 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-21 17:57:29.281790387 +0000 UTC m=+83.263753573 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.376684 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.376742 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.376757 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.376772 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.376783 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:57Z","lastTransitionTime":"2026-01-21T17:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.479262 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.479314 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.479325 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.479340 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.479351 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:57Z","lastTransitionTime":"2026-01-21T17:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
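The repeating five-entry blocks above (three capacity conditions, NodeNotReady, and the setters.go write of Ready=False) are the kubelet re-recording node status roughly every 100ms while the container runtime reports NetworkReady=false. The condition clears only once a network plugin writes a CNI configuration into /etc/kubernetes/cni/net.d/. A small sketch that mirrors the readiness check against the directory named in the message (the file patterns are an assumption; runtimes differ in which extensions they accept):

    // Hedged sketch: report whether any CNI config is present in the
    // directory the runtime is complaining about.
    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        dir := "/etc/kubernetes/cni/net.d" // path taken from the log messages
        var matches []string
        for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
            m, _ := filepath.Glob(filepath.Join(dir, pat))
            matches = append(matches, m...)
        }
        if len(matches) == 0 {
            fmt.Println("no CNI configuration file found; node stays NotReady")
            os.Exit(1)
        }
        fmt.Println("CNI config present:", matches)
    }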
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.582421 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.582470 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.582478 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.582494 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.582508 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:57Z","lastTransitionTime":"2026-01-21T17:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.685517 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.685571 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.685586 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.685604 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.685618 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:57Z","lastTransitionTime":"2026-01-21T17:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.790134 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.790215 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.790238 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.790272 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.790296 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:57Z","lastTransitionTime":"2026-01-21T17:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.893933 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.895195 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.895462 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.895655 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.895928 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:57Z","lastTransitionTime":"2026-01-21T17:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:57 crc kubenswrapper[4792]: I0121 17:56:57.999491 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.000002 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.000142 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.000321 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.000479 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:58Z","lastTransitionTime":"2026-01-21T17:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.103935 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.104006 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.104025 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.104054 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.104074 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:58Z","lastTransitionTime":"2026-01-21T17:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.208007 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.208093 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.208112 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.208143 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.208164 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:58Z","lastTransitionTime":"2026-01-21T17:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.235695 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 06:39:47.426640419 +0000 UTC
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.245675 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.245765 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.245798 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn"
Jan 21 17:56:58 crc kubenswrapper[4792]: E0121 17:56:58.245904 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.246075 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:56:58 crc kubenswrapper[4792]: E0121 17:56:58.246090 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea"
Jan 21 17:56:58 crc kubenswrapper[4792]: E0121 17:56:58.246200 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
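The util.go/pod_workers.go pairs above show the consequence of the NotReady network for pod sync: these pods have no live sandbox and need a new one, but while the runtime network is not ready the kubelet declines to create sandboxes for pods that are not host-network, so they are skipped on each sync round while host-network pods (such as the OVN node pod) keep running. A simplified sketch of that gate, using stand-in types rather than the real kubelet API:

    // Hedged sketch of the gating visible above: a new sandbox for a
    // non-host-network pod is refused while the runtime network is not
    // ready; host-network pods are exempt.
    package main

    import (
        "errors"
        "fmt"
    )

    type Pod struct {
        Name        string
        HostNetwork bool
    }

    var errNetworkNotReady = errors.New(
        "network is not ready: container runtime network not ready: NetworkReady=false")

    // canStartSandbox mirrors the decision, not the real kubelet code.
    func canStartSandbox(p Pod, networkReady bool) error {
        if !networkReady && !p.HostNetwork {
            return errNetworkNotReady // surfaces as "Error syncing pod, skipping"
        }
        return nil
    }

    func main() {
        for _, p := range []Pod{
            {Name: "network-check-target-xd92c", HostNetwork: false},
            {Name: "ovnkube-node-8t4xq", HostNetwork: true},
        } {
            fmt.Printf("%s: %v\n", p.Name, canStartSandbox(p, false))
        }
    }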
Jan 21 17:56:58 crc kubenswrapper[4792]: E0121 17:56:58.246289 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.310699 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.310745 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.310755 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.310774 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.310786 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:58Z","lastTransitionTime":"2026-01-21T17:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.413761 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.414304 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.414466 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.414629 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.414818 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:58Z","lastTransitionTime":"2026-01-21T17:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.518801 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.519343 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.519539 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.519700 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.519877 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:58Z","lastTransitionTime":"2026-01-21T17:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.624174 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.624685 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.624889 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.625106 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.625254 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:58Z","lastTransitionTime":"2026-01-21T17:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.729022 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.729487 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.729637 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.729777 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.729982 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:58Z","lastTransitionTime":"2026-01-21T17:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.833688 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.833780 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.833796 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.833831 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.833874 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:58Z","lastTransitionTime":"2026-01-21T17:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.901386 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs\") pod \"network-metrics-daemon-ddsqn\" (UID: \"97b1a1f0-3533-44d9-8c10-9feb31d988ea\") " pod="openshift-multus/network-metrics-daemon-ddsqn"
Jan 21 17:56:58 crc kubenswrapper[4792]: E0121 17:56:58.901561 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 21 17:56:58 crc kubenswrapper[4792]: E0121 17:56:58.901636 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs podName:97b1a1f0-3533-44d9-8c10-9feb31d988ea nodeName:}" failed. No retries permitted until 2026-01-21 17:57:14.901616142 +0000 UTC m=+68.883579328 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs") pod "network-metrics-daemon-ddsqn" (UID: "97b1a1f0-3533-44d9-8c10-9feb31d988ea") : object "openshift-multus"/"metrics-daemon-secret" not registered
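Two certificate threads run through this capture. The certificate_manager.go entries above show the kubelet-serving certificate's rotation deadline already in the past (2026-01-18 and 2025-11-17 against a current time of 2026-01-21), so rotation is due immediately; and the status_manager.go entries that follow fail because the network-node-identity webhook at https://127.0.0.1:9743 is serving a certificate that expired on 2025-08-24. A sketch for inspecting such an endpoint's certificate dates; InsecureSkipVerify is deliberate here, since an expired certificate would otherwise fail the handshake before it can be read:

    // Hedged sketch: dial the webhook endpoint named in the failures
    // below and print each peer certificate's validity window.
    package main

    import (
        "crypto/tls"
        "fmt"
        "time"
    )

    func main() {
        // Address taken from the log; run this on the node itself.
        conn, err := tls.Dial("tcp", "127.0.0.1:9743",
            &tls.Config{InsecureSkipVerify: true}) // read the cert, don't authenticate
        if err != nil {
            fmt.Println("dial failed:", err)
            return
        }
        defer conn.Close()
        for _, cert := range conn.ConnectionState().PeerCertificates {
            fmt.Printf("subject=%s notBefore=%s notAfter=%s expired=%v\n",
                cert.Subject, cert.NotBefore, cert.NotAfter,
                time.Now().After(cert.NotAfter))
        }
    }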
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.936549 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.936588 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.936601 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.936619 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.936629 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:58Z","lastTransitionTime":"2026-01-21T17:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.991035 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:58 crc kubenswrapper[4792]: I0121 17:56:58.992315 4792 scope.go:117] "RemoveContainer" containerID="b341d5797f6ee8e6c25b3e811910f6aee111ceca0e6cc82b6109fddf5b2b2b78"
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.030759 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.043050 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.043093 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.043112 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.043138 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.043152 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:59Z","lastTransitionTime":"2026-01-21T17:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.057331 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97b1a1f0-3533-44d9-8c10-9feb31d988ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ddsqn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.079086 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.095153 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.110627 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.125893 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.146007 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f
3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.147869 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.147917 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.147926 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.147946 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.147959 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:59Z","lastTransitionTime":"2026-01-21T17:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.161476 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.176999 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.189933 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.202034 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.213915 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.234343 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b341d5797f6ee8e6c25b3e811910f6aee111ceca0e6cc82b6109fddf5b2b2b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b341d5797f6ee8e6c25b3e811910f6aee111ceca0e6cc82b6109fddf5b2b2b78\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:56:46Z\\\",\\\"message\\\":\\\"tart default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z]\\\\nI0121 17:56:46.315265 6232 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-m5d6x in node crc\\\\nI0121 17:56:46.315272 6232 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-m5d6x after 0 failed attempt(s)\\\\nI0121 17:56:46.315284 6232 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-m5d6x\\\\nI0121 17:56:46.315270 6232 obj_retry.go:285] Attempting retry of *v1.Pod openshift-multus/network-metrics-daemon-ddsqn before timer (time: 2026-01-21 17:56:47.5370\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-8t4xq_openshift-ovn-kubernetes(10549a02-b482-4bc0-a770-65dbb57f340a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.237100 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 06:45:36.976696189 +0000 UTC Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.248698 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.257652 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.257714 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.257731 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.257764 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.257779 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:59Z","lastTransitionTime":"2026-01-21T17:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.269825 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.281403 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.291979 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4a2d61b-a0b0-4073-bd76-665e9fa19250\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a32de4f393c30583281a4431630dbe23e1420386fbdeed8737595b78d464845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36f12319da77e736cc4fd4803c0186c2821e25764a9b8bbb41e137b95b6251f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vrcps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 
17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.360915 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.360958 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.360969 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.360989 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.361001 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:59Z","lastTransitionTime":"2026-01-21T17:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.463839 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.463901 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.463915 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.463935 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.463949 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:59Z","lastTransitionTime":"2026-01-21T17:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.567448 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.567507 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.567521 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.567608 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.567621 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:59Z","lastTransitionTime":"2026-01-21T17:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.670775 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.670856 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.670872 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.670894 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.670907 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:59Z","lastTransitionTime":"2026-01-21T17:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.687590 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8t4xq_10549a02-b482-4bc0-a770-65dbb57f340a/ovnkube-controller/1.log"
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.690529 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerStarted","Data":"22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2"}
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.691182 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq"
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.708961 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.724181 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.744799 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.758898 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97b1a1f0-3533-44d9-8c10-9feb31d988ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ddsqn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc 
kubenswrapper[4792]: I0121 17:56:59.774164 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.774213 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.774221 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.774240 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.774252 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:59Z","lastTransitionTime":"2026-01-21T17:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.777961 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted.
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.792102 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.803211 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.817680 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.839593 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kube
rnetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\
\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.857339 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.871758 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.877026 4792 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.877095 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.877118 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.877143 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.877166 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:59Z","lastTransitionTime":"2026-01-21T17:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.896202 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22501bc264639a10b65231a9565f871529f350ff
7835b5b7675d712f93e29fc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b341d5797f6ee8e6c25b3e811910f6aee111ceca0e6cc82b6109fddf5b2b2b78\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:56:46Z\\\",\\\"message\\\":\\\"tart default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z]\\\\nI0121 17:56:46.315265 6232 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-m5d6x in node crc\\\\nI0121 17:56:46.315272 6232 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-m5d6x after 0 failed attempt(s)\\\\nI0121 17:56:46.315284 6232 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-m5d6x\\\\nI0121 17:56:46.315270 6232 obj_retry.go:285] Attempting retry of *v1.Pod openshift-multus/network-metrics-daemon-ddsqn before timer (time: 2026-01-21 
17:56:47.5370\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\
":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.909443 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.923742 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.943917 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4a2d61b-a0b0-4073-bd76-665e9fa19250\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a32de4f393c30583281a4431630dbe23e1420386fbdeed8737595b78d464845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36f12319da77e736cc4fd4803c0186c2821e25764a9b8bbb41e137b95b6251f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vrcps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 
17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.959114 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.973810 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:59Z is after 2025-08-24T17:21:41Z" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.980606 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.980648 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.980659 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.980679 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:56:59 crc kubenswrapper[4792]: I0121 17:56:59.980695 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:56:59Z","lastTransitionTime":"2026-01-21T17:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.084681 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.084764 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.084787 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.084814 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.084833 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:00Z","lastTransitionTime":"2026-01-21T17:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.188928 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.189024 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.189036 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.189059 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.189071 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:00Z","lastTransitionTime":"2026-01-21T17:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.239060 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 10:22:51.427293565 +0000 UTC Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.246656 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.246692 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.246656 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:00 crc kubenswrapper[4792]: E0121 17:57:00.246911 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.246933 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:00 crc kubenswrapper[4792]: E0121 17:57:00.247086 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:00 crc kubenswrapper[4792]: E0121 17:57:00.247237 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:00 crc kubenswrapper[4792]: E0121 17:57:00.247426 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.292563 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.292655 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.292680 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.292719 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.292744 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:00Z","lastTransitionTime":"2026-01-21T17:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.396418 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.396481 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.396496 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.396531 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.396547 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:00Z","lastTransitionTime":"2026-01-21T17:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.499215 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.499271 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.499286 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.499307 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.499505 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:00Z","lastTransitionTime":"2026-01-21T17:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.603522 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.603579 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.603592 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.603615 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.603629 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:00Z","lastTransitionTime":"2026-01-21T17:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.696395 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8t4xq_10549a02-b482-4bc0-a770-65dbb57f340a/ovnkube-controller/2.log" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.697183 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8t4xq_10549a02-b482-4bc0-a770-65dbb57f340a/ovnkube-controller/1.log" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.700454 4792 generic.go:334] "Generic (PLEG): container finished" podID="10549a02-b482-4bc0-a770-65dbb57f340a" containerID="22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2" exitCode=1 Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.700546 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerDied","Data":"22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2"} Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.700629 4792 scope.go:117] "RemoveContainer" containerID="b341d5797f6ee8e6c25b3e811910f6aee111ceca0e6cc82b6109fddf5b2b2b78" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.701309 4792 scope.go:117] "RemoveContainer" containerID="22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2" Jan 21 17:57:00 crc kubenswrapper[4792]: E0121 17:57:00.701538 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-8t4xq_openshift-ovn-kubernetes(10549a02-b482-4bc0-a770-65dbb57f340a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.706541 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.706605 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.706618 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.706640 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.706654 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:00Z","lastTransitionTime":"2026-01-21T17:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.727662 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:00Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.745160 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:00Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.761323 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:00Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.775253 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:00Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.791344 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:00Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.804590 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:00Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.809981 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.810050 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.810065 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.810089 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.810105 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:00Z","lastTransitionTime":"2026-01-21T17:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.822824 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:00Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.851043 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/va
r/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3e
f3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b341d5797f6ee8e6c25b3e811910f6aee111ceca0e6cc82b6109fddf5b2b2b78\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:56:46Z\\\",\\\"message\\\":\\\"tart default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:56:46Z is after 2025-08-24T17:21:41Z]\\\\nI0121 17:56:46.315265 6232 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-m5d6x in node crc\\\\nI0121 17:56:46.315272 6232 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-m5d6x after 0 failed attempt(s)\\\\nI0121 17:56:46.315284 6232 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-m5d6x\\\\nI0121 17:56:46.315270 6232 obj_retry.go:285] Attempting retry of *v1.Pod openshift-multus/network-metrics-daemon-ddsqn before timer (time: 2026-01-21 
17:56:47.5370\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:57:00Z\\\",\\\"message\\\":\\\"bm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true service.alpha.openshift.io/serving-cert-secret-name:catalog-operator-serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0075c7db7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https-metrics,Protocol:TCP,Port:8443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: catalog-operator,},ClusterIP:10.217.5.204,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF0121 17:56:59.930646 6406 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:00Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.869816 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b8
9c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:00Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.885473 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:00Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.897524 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:00Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.909614 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4a2d61b-a0b0-4073-bd76-665e9fa19250\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a32de4f393c30583281a4431630dbe23e1420386fbdeed8737595b78d464845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36f12319da77e736cc4fd4803c0186c2821e25764a9b8bbb41e137b95b6251f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vrcps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:00Z is after 2025-08-24T17:21:41Z" Jan 21 
17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.912479 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.912521 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.912536 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.912561 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.912575 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:00Z","lastTransitionTime":"2026-01-21T17:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.925356 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:00Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.942438 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:00Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.959470 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:00Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.976407 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:00Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:00 crc kubenswrapper[4792]: I0121 17:57:00.990075 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97b1a1f0-3533-44d9-8c10-9feb31d988ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ddsqn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:00Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:01 crc 
kubenswrapper[4792]: I0121 17:57:01.016276 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.016350 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.016365 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.016394 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.016410 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:01Z","lastTransitionTime":"2026-01-21T17:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.119558 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.119630 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.119643 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.119668 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.119682 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:01Z","lastTransitionTime":"2026-01-21T17:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.223500 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.223558 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.223569 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.223593 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.223604 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:01Z","lastTransitionTime":"2026-01-21T17:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.239735 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 20:05:42.91948921 +0000 UTC
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.328242 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.328300 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.328309 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.328326 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.328336 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:01Z","lastTransitionTime":"2026-01-21T17:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.431708 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.431770 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.431781 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.431802 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.431816 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:01Z","lastTransitionTime":"2026-01-21T17:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.534453 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.534587 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.534607 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.534633 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.534649 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:01Z","lastTransitionTime":"2026-01-21T17:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.637048 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.637094 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.637107 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.637127 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.637139 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:01Z","lastTransitionTime":"2026-01-21T17:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.708083 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8t4xq_10549a02-b482-4bc0-a770-65dbb57f340a/ovnkube-controller/2.log" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.714016 4792 scope.go:117] "RemoveContainer" containerID="22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2" Jan 21 17:57:01 crc kubenswrapper[4792]: E0121 17:57:01.714425 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-8t4xq_openshift-ovn-kubernetes(10549a02-b482-4bc0-a770-65dbb57f340a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.732716 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":
\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:01Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.741058 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.741124 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.741141 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.741167 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.741186 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:01Z","lastTransitionTime":"2026-01-21T17:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin 
returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.747404 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:01Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.760745 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:01Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.774907 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4a2d61b-a0b0-4073-bd76-665e9fa19250\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a32de4f393c30583281a4431630dbe23e1420386fbdeed8737595b78d464845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36f12319da77e736cc4fd4803c0186c2821e25764a9b8bbb41e137b95b6251f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vrcps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:01Z is after 2025-08-24T17:21:41Z" Jan 21 
17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.795818 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:01Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.813236 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:01Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.828267 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:01Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.842993 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:01Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.844024 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.844066 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.844076 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.844096 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.844113 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:01Z","lastTransitionTime":"2026-01-21T17:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.856395 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97b1a1f0-3533-44d9-8c10-9feb31d988ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ddsqn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:01Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.879410 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f
3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:01Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.896832 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a
39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:01Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.913934 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webh
ook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:01Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.928770 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:01Z is after 2025-08-24T17:21:41Z"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.947721 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.947789 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.947800 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.947821 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.947833 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:01Z","lastTransitionTime":"2026-01-21T17:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.947985 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:01Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.963671 4792 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:01Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.977212 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:01Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:01 crc kubenswrapper[4792]: I0121 17:57:01.999735 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:57:00Z\\\",\\\"message\\\":\\\"bm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true service.alpha.openshift.io/serving-cert-secret-name:catalog-operator-serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0075c7db7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https-metrics,Protocol:TCP,Port:8443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: catalog-operator,},ClusterIP:10.217.5.204,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF0121 17:56:59.930646 6406 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-8t4xq_openshift-ovn-kubernetes(10549a02-b482-4bc0-a770-65dbb57f340a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:01Z is after 2025-08-24T17:21:41Z"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.050701 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.050753 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.050767 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.050787 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.050802 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:02Z","lastTransitionTime":"2026-01-21T17:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.153408 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.153481 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.153508 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.153543 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.153568 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:02Z","lastTransitionTime":"2026-01-21T17:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.240466 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 12:48:22.471806826 +0000 UTC
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.246254 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.246341 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:57:02 crc kubenswrapper[4792]: E0121 17:57:02.246521 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.246601 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:57:02 crc kubenswrapper[4792]: E0121 17:57:02.246763 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.246939 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:57:02 crc kubenswrapper[4792]: E0121 17:57:02.247048 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 21 17:57:02 crc kubenswrapper[4792]: E0121 17:57:02.247151 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.256808 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.256898 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.256918 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.256943 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.256965 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:02Z","lastTransitionTime":"2026-01-21T17:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.360012 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.360070 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.360082 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.360104 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.360113 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:02Z","lastTransitionTime":"2026-01-21T17:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.463307 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.463352 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.463362 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.463382 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.463394 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:02Z","lastTransitionTime":"2026-01-21T17:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.464592 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.475941 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.480035 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:02Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.498094 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:02Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.504653 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.504762 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.504778 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.504799 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.504813 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:02Z","lastTransitionTime":"2026-01-21T17:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:02 crc kubenswrapper[4792]: E0121 17:57:02.519473 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:02Z is after 
2025-08-24T17:21:41Z" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.522939 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/
etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026
-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:02Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.525177 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.525261 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.525275 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.525298 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.525312 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:02Z","lastTransitionTime":"2026-01-21T17:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.538272 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:02Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:02 crc kubenswrapper[4792]: E0121 17:57:02.540361 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:02Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.544969 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.545208 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.545347 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.545469 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.545595 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:02Z","lastTransitionTime":"2026-01-21T17:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.555031 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:02Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:02 crc kubenswrapper[4792]: E0121 17:57:02.560234 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:02Z is after 2025-08-24T17:21:41Z"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.564576 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.564614 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.564625 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.564648 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.564664 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:02Z","lastTransitionTime":"2026-01-21T17:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.580077 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:57:00Z\\\",\\\"message\\\":\\\"bm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true service.alpha.openshift.io/serving-cert-secret-name:catalog-operator-serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0075c7db7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https-metrics,Protocol:TCP,Port:8443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: catalog-operator,},ClusterIP:10.217.5.204,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF0121 17:56:59.930646 6406 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-8t4xq_openshift-ovn-kubernetes(10549a02-b482-4bc0-a770-65dbb57f340a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:02Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:02 crc kubenswrapper[4792]: E0121 17:57:02.585804 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:02Z is after 
2025-08-24T17:21:41Z"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.589729 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.589789 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.589802 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.589825 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.589841 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:02Z","lastTransitionTime":"2026-01-21T17:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.593789 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:02Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:02 crc kubenswrapper[4792]: E0121 17:57:02.604505 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:02Z is after 
2025-08-24T17:21:41Z"
Jan 21 17:57:02 crc kubenswrapper[4792]: E0121 17:57:02.604772 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.605979 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:02Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.607956 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.608010 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.608025 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.608047 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.608063 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:02Z","lastTransitionTime":"2026-01-21T17:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.621296 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4a2d61b-a0b0-4073-bd76-665e9fa19250\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a32de4f393c30583281a4431630dbe23e1420386fbdeed8737595b78d464845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":
\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36f12319da77e736cc4fd4803c0186c2821e25764a9b8bbb41e137b95b6251f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vrcps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:02Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.636120 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:02Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.652429 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:02Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.665124 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:02Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.684685 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:02Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.700706 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:02Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.710990 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.711032 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.711041 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.711060 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.711070 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:02Z","lastTransitionTime":"2026-01-21T17:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.714591 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97b1a1f0-3533-44d9-8c10-9feb31d988ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ddsqn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:02Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.732119 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:02Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.753320 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:02Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.813991 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.814057 4792 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.814070 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.814098 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.814114 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:02Z","lastTransitionTime":"2026-01-21T17:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.917077 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.917125 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.917138 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.917157 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:02 crc kubenswrapper[4792]: I0121 17:57:02.917169 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:02Z","lastTransitionTime":"2026-01-21T17:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.020251 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.020366 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.020394 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.020433 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.020461 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:03Z","lastTransitionTime":"2026-01-21T17:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.123467 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.123512 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.123562 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.123581 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.123595 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:03Z","lastTransitionTime":"2026-01-21T17:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.226340 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.226373 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.226382 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.226397 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.227002 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:03Z","lastTransitionTime":"2026-01-21T17:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.241711 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 21:08:06.019333376 +0000 UTC Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.330257 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.330326 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.330341 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.330373 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.330395 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:03Z","lastTransitionTime":"2026-01-21T17:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.433285 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.433342 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.433358 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.433382 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.433397 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:03Z","lastTransitionTime":"2026-01-21T17:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.536731 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.536775 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.536786 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.536808 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.536821 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:03Z","lastTransitionTime":"2026-01-21T17:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.640943 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.641561 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.641800 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.642044 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.642198 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:03Z","lastTransitionTime":"2026-01-21T17:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.746441 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.746506 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.746520 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.746541 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.746553 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:03Z","lastTransitionTime":"2026-01-21T17:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.849559 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.849628 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.849653 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.849685 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.849705 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:03Z","lastTransitionTime":"2026-01-21T17:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.953411 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.953458 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.953469 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.953485 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:03 crc kubenswrapper[4792]: I0121 17:57:03.953495 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:03Z","lastTransitionTime":"2026-01-21T17:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.057943 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.058436 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.058449 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.058469 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.058484 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:04Z","lastTransitionTime":"2026-01-21T17:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.161415 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.161511 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.161525 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.161552 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.161568 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:04Z","lastTransitionTime":"2026-01-21T17:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.242447 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 16:06:24.801541521 +0000 UTC Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.246043 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.246043 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.246043 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:04 crc kubenswrapper[4792]: E0121 17:57:04.246663 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:04 crc kubenswrapper[4792]: E0121 17:57:04.246843 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.247059 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:04 crc kubenswrapper[4792]: E0121 17:57:04.247392 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:04 crc kubenswrapper[4792]: E0121 17:57:04.247617 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.264980 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.265044 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.265061 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.265086 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.265104 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:04Z","lastTransitionTime":"2026-01-21T17:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.368706 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.368751 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.368762 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.368782 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.368794 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:04Z","lastTransitionTime":"2026-01-21T17:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.471320 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.471387 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.471402 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.471425 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.471441 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:04Z","lastTransitionTime":"2026-01-21T17:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.575334 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.575903 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.576063 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.576250 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.576396 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:04Z","lastTransitionTime":"2026-01-21T17:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.680287 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.680334 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.680347 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.680364 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.680375 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:04Z","lastTransitionTime":"2026-01-21T17:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.784028 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.784095 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.784115 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.784144 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.784164 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:04Z","lastTransitionTime":"2026-01-21T17:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.888101 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.888186 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.888203 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.888235 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.888249 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:04Z","lastTransitionTime":"2026-01-21T17:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.991157 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.991600 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.991746 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.991912 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:04 crc kubenswrapper[4792]: I0121 17:57:04.992035 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:04Z","lastTransitionTime":"2026-01-21T17:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.095631 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.095693 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.095709 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.095732 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.095750 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:05Z","lastTransitionTime":"2026-01-21T17:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.199506 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.199552 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.199572 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.199593 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.199606 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:05Z","lastTransitionTime":"2026-01-21T17:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.243636 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 00:36:09.148470783 +0000 UTC
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.303252 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.303660 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.303830 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.304190 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.304437 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:05Z","lastTransitionTime":"2026-01-21T17:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.407253 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.407312 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.407332 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.407357 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.407375 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:05Z","lastTransitionTime":"2026-01-21T17:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.510644 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.510731 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.510750 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.510783 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.510807 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:05Z","lastTransitionTime":"2026-01-21T17:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.613660 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.614036 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.614127 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.614299 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.614397 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:05Z","lastTransitionTime":"2026-01-21T17:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.718303 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.718380 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.718404 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.718437 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.718460 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:05Z","lastTransitionTime":"2026-01-21T17:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.822553 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.822602 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.822614 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.822635 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.822648 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:05Z","lastTransitionTime":"2026-01-21T17:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.926020 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.926058 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.926065 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.926079 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:05 crc kubenswrapper[4792]: I0121 17:57:05.926090 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:05Z","lastTransitionTime":"2026-01-21T17:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.029056 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.029123 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.029143 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.029171 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.029188 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:06Z","lastTransitionTime":"2026-01-21T17:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.131870 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.131910 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.131921 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.131934 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.131944 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:06Z","lastTransitionTime":"2026-01-21T17:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.234465 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.234893 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.235046 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.235151 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.235232 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:06Z","lastTransitionTime":"2026-01-21T17:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.244900 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 07:40:34.096623074 +0000 UTC
Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.246126 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn"
Jan 21 17:57:06 crc kubenswrapper[4792]: E0121 17:57:06.246291 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea"
Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.252743 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.252821 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:57:06 crc kubenswrapper[4792]: E0121 17:57:06.252930 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 21 17:57:06 crc kubenswrapper[4792]: E0121 17:57:06.253043 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.253178 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:57:06 crc kubenswrapper[4792]: E0121 17:57:06.253556 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.273201 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b1608
23547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:06Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.292074 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:06Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.309549 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:06Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.323971 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:06Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.337402 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.337466 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.337564 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.337601 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.337613 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:06Z","lastTransitionTime":"2026-01-21T17:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.344373 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:06Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.359835 4792 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:06Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.375226 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:06Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.397910 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:57:00Z\\\",\\\"message\\\":\\\"bm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true service.alpha.openshift.io/serving-cert-secret-name:catalog-operator-serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0075c7db7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https-metrics,Protocol:TCP,Port:8443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: catalog-operator,},ClusterIP:10.217.5.204,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF0121 17:56:59.930646 6406 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-8t4xq_openshift-ovn-kubernetes(10549a02-b482-4bc0-a770-65dbb57f340a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:06Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.412570 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:06Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.426353 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:06Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.438406 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:06Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.441188 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.441268 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.441296 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.441313 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.441323 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:06Z","lastTransitionTime":"2026-01-21T17:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.449984 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4a2d61b-a0b0-4073-bd76-665e9fa19250\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a32de4f393c30583281a4431630dbe23e1420386fbdeed8737595b78d464845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36f12319da77e736cc4fd4803c0186c2821e25764a9b8bbb41e137b95b6251f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:41Z\\\"}}\" 
for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vrcps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:06Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.463754 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd97b948-4785-419e-b471-aac8172716f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a73d7382542136d91922475711138b5a59633f510c9beadd57bba84cf27db54e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41cdac15a17aeca1efb61e6fb234ab59861bb3b692cc028f5d305c23eaad7366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98f09db41085faefd387a464b2e702916f6ee1850251810834bffc896ec4479e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recove
ry-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1b47455f06c5a6fb55c0ba54c00c3113575c9aceec13f75c0b763b6a8677f06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1b47455f06c5a6fb55c0ba54c00c3113575c9aceec13f75c0b763b6a8677f06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:06Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.477045 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:06Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.492347 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:06Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.505323 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:06Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.520815 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:06Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.533255 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97b1a1f0-3533-44d9-8c10-9feb31d988ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ddsqn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:06Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:06 crc 
kubenswrapper[4792]: I0121 17:57:06.543675 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.543729 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.543742 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.543760 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.543772 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:06Z","lastTransitionTime":"2026-01-21T17:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.646930 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.646987 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.646996 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.647010 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.647020 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:06Z","lastTransitionTime":"2026-01-21T17:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.750743 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.750798 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.750812 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.750834 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.750882 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:06Z","lastTransitionTime":"2026-01-21T17:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.855139 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.855235 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.855266 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.855314 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.855340 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:06Z","lastTransitionTime":"2026-01-21T17:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.958740 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.958788 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.958799 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.958816 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:06 crc kubenswrapper[4792]: I0121 17:57:06.958826 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:06Z","lastTransitionTime":"2026-01-21T17:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:07 crc kubenswrapper[4792]: I0121 17:57:07.062789 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:07 crc kubenswrapper[4792]: I0121 17:57:07.062921 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:07 crc kubenswrapper[4792]: I0121 17:57:07.062942 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:07 crc kubenswrapper[4792]: I0121 17:57:07.062972 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:07 crc kubenswrapper[4792]: I0121 17:57:07.062991 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:07Z","lastTransitionTime":"2026-01-21T17:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:07 crc kubenswrapper[4792]: I0121 17:57:07.166308 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:07 crc kubenswrapper[4792]: I0121 17:57:07.166387 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:07 crc kubenswrapper[4792]: I0121 17:57:07.166402 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:07 crc kubenswrapper[4792]: I0121 17:57:07.166424 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:07 crc kubenswrapper[4792]: I0121 17:57:07.166439 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:07Z","lastTransitionTime":"2026-01-21T17:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:07 crc kubenswrapper[4792]: I0121 17:57:07.246230 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 20:12:30.653423174 +0000 UTC Jan 21 17:57:07 crc kubenswrapper[4792]: I0121 17:57:07.270218 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:07 crc kubenswrapper[4792]: I0121 17:57:07.270269 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:07 crc kubenswrapper[4792]: I0121 17:57:07.270278 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:07 crc kubenswrapper[4792]: I0121 17:57:07.270298 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:07 crc kubenswrapper[4792]: I0121 17:57:07.270308 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:07Z","lastTransitionTime":"2026-01-21T17:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 21 17:57:08 crc kubenswrapper[4792]: I0121 17:57:08.245668 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:57:08 crc kubenswrapper[4792]: I0121 17:57:08.245694 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:57:08 crc kubenswrapper[4792]: E0121 17:57:08.245811 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 21 17:57:08 crc kubenswrapper[4792]: I0121 17:57:08.245914 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn"
Jan 21 17:57:08 crc kubenswrapper[4792]: I0121 17:57:08.246195 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:57:08 crc kubenswrapper[4792]: E0121 17:57:08.246220 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 21 17:57:08 crc kubenswrapper[4792]: I0121 17:57:08.246509 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 14:29:42.073608122 +0000 UTC
Jan 21 17:57:08 crc kubenswrapper[4792]: E0121 17:57:08.246512 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea"
Jan 21 17:57:08 crc kubenswrapper[4792]: E0121 17:57:08.246633 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:08 crc kubenswrapper[4792]: I0121 17:57:08.306742 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:08 crc kubenswrapper[4792]: I0121 17:57:08.306801 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:08 crc kubenswrapper[4792]: I0121 17:57:08.306815 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:08 crc kubenswrapper[4792]: I0121 17:57:08.306832 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:08 crc kubenswrapper[4792]: I0121 17:57:08.306869 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:08Z","lastTransitionTime":"2026-01-21T17:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:08 crc kubenswrapper[4792]: I0121 17:57:08.410265 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:08 crc kubenswrapper[4792]: I0121 17:57:08.410315 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:08 crc kubenswrapper[4792]: I0121 17:57:08.410327 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:08 crc kubenswrapper[4792]: I0121 17:57:08.410345 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:08 crc kubenswrapper[4792]: I0121 17:57:08.410360 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:08Z","lastTransitionTime":"2026-01-21T17:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 21 17:57:09 crc kubenswrapper[4792]: I0121 17:57:09.247101 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 04:39:46.533256317 +0000 UTC
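Note that every certificate_manager.go:356 line reports the same expiration (2026-02-24 05:53:03 UTC) but a different rotation deadline. That is expected: client-go's certificate manager recomputes the deadline with fresh random jitter on each pass, which is understood to land between 70% and 90% of the certificate's lifetime. A sketch of that computation follows; the 70-90% window and the issue time are assumptions, and only the expiry comes from the log.

package main

import (
    "fmt"
    "math/rand"
    "time"
)

// jitteredDeadline sketches how client-go's certificate manager picks the
// next rotation deadline: a random point between 70% and 90% of the
// certificate's lifetime. The exact window is an assumption about
// client-go's behavior, not something stated in this log.
func jitteredDeadline(notBefore, notAfter time.Time) time.Time {
    lifetime := notAfter.Sub(notBefore)
    frac := 0.7 + 0.2*rand.Float64() // fresh jitter on every call
    return notBefore.Add(time.Duration(float64(lifetime) * frac))
}

func main() {
    // Expiry taken from the log; the issue time is an assumption.
    notAfter, _ := time.Parse(time.RFC3339, "2026-02-24T05:53:03Z")
    notBefore := notAfter.Add(-90 * 24 * time.Hour)
    // Each call lands on a different deadline, which is why successive
    // certificate_manager.go:356 lines print different dates.
    for i := 0; i < 3; i++ {
        fmt.Println(jitteredDeadline(notBefore, notAfter))
    }
}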
Jan 21 17:57:10 crc kubenswrapper[4792]: I0121 17:57:10.246606 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn"
Jan 21 17:57:10 crc kubenswrapper[4792]: E0121 17:57:10.246815 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea"
Jan 21 17:57:10 crc kubenswrapper[4792]: I0121 17:57:10.246930 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:57:10 crc kubenswrapper[4792]: E0121 17:57:10.247007 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 21 17:57:10 crc kubenswrapper[4792]: I0121 17:57:10.247543 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:57:10 crc kubenswrapper[4792]: E0121 17:57:10.247609 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:10 crc kubenswrapper[4792]: I0121 17:57:10.247657 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 14:49:32.235449413 +0000 UTC Jan 21 17:57:10 crc kubenswrapper[4792]: I0121 17:57:10.247810 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:10 crc kubenswrapper[4792]: E0121 17:57:10.247919 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:10 crc kubenswrapper[4792]: I0121 17:57:10.267356 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:10 crc kubenswrapper[4792]: I0121 17:57:10.267401 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:10 crc kubenswrapper[4792]: I0121 17:57:10.267411 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:10 crc kubenswrapper[4792]: I0121 17:57:10.267428 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:10 crc kubenswrapper[4792]: I0121 17:57:10.267441 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:10Z","lastTransitionTime":"2026-01-21T17:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:10 crc kubenswrapper[4792]: I0121 17:57:10.370649 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:10 crc kubenswrapper[4792]: I0121 17:57:10.370706 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:10 crc kubenswrapper[4792]: I0121 17:57:10.370718 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:10 crc kubenswrapper[4792]: I0121 17:57:10.370735 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:10 crc kubenswrapper[4792]: I0121 17:57:10.370750 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:10Z","lastTransitionTime":"2026-01-21T17:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 21 17:57:11 crc kubenswrapper[4792]: I0121 17:57:11.248142 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 11:13:54.915466109 +0000 UTC
Has your network provider started?"} Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.121464 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.121519 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.121531 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.121554 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.121568 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:12Z","lastTransitionTime":"2026-01-21T17:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.224256 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.224311 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.224322 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.224347 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.224366 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:12Z","lastTransitionTime":"2026-01-21T17:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.246052 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.246093 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:12 crc kubenswrapper[4792]: E0121 17:57:12.246183 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.246113 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.246227 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:12 crc kubenswrapper[4792]: E0121 17:57:12.246383 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:12 crc kubenswrapper[4792]: E0121 17:57:12.246658 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:12 crc kubenswrapper[4792]: E0121 17:57:12.246761 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.249283 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 02:53:06.914647386 +0000 UTC Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.327483 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.327532 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.327541 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.327557 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.327566 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:12Z","lastTransitionTime":"2026-01-21T17:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.430615 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.431022 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.431097 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.431173 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.431235 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:12Z","lastTransitionTime":"2026-01-21T17:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.533698 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.533740 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.533750 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.533767 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.533779 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:12Z","lastTransitionTime":"2026-01-21T17:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.636777 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.636867 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.636882 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.636900 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.636915 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:12Z","lastTransitionTime":"2026-01-21T17:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.739477 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.740181 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.740266 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.740342 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.740441 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:12Z","lastTransitionTime":"2026-01-21T17:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.843976 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.844045 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.844059 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.844084 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.844098 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:12Z","lastTransitionTime":"2026-01-21T17:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.875026 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.875092 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.875104 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.875130 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.875147 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:12Z","lastTransitionTime":"2026-01-21T17:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:12 crc kubenswrapper[4792]: E0121 17:57:12.889445 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:12Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.894066 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.894125 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
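The patch failure above is not a kubelet-side bug: the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 presents a serving certificate whose NotAfter is 2025-08-24T17:21:41Z, while the node clock reads 2026-01-21. A standalone Go sketch that reproduces the x509 validity check against that endpoint follows; the address is taken from the log, everything else is illustrative.

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Inspect the certificate without trusting it, so the handshake
	// succeeds even though chain verification would fail.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("NotBefore=%s NotAfter=%s\n", cert.NotBefore, cert.NotAfter)
	if now := time.Now(); now.After(cert.NotAfter) {
		// Same condition the kubelet reports: current time is after NotAfter.
		fmt.Printf("expired: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	}
}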
event="NodeHasNoDiskPressure" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.894146 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.894175 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.894191 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:12Z","lastTransitionTime":"2026-01-21T17:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:12 crc kubenswrapper[4792]: E0121 17:57:12.911703 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:12Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.916133 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.916188 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
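For readers unfamiliar with the escaped err string format: the kubelet sends its status update as a strategic merge patch, and the $setElementOrder/conditions directive pins the ordering of the merged condition list. A small Go sketch of that patch shape (illustrative structs, not the client-go implementation):

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Skeleton of the PATCH body visible (escaped) in the log above.
	patch := map[string]any{
		"status": map[string]any{
			"$setElementOrder/conditions": []map[string]string{
				{"type": "MemoryPressure"},
				{"type": "DiskPressure"},
				{"type": "PIDPressure"},
				{"type": "Ready"},
			},
			"conditions": []map[string]string{
				{"type": "Ready", "status": "False", "reason": "KubeletNotReady"},
			},
		},
	}
	out, _ := json.Marshal(patch)
	fmt.Println(string(out))
}

The webhook rejection happens before this body is ever evaluated, which is why the identical payload reappears verbatim in each retry below.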
event="NodeHasNoDiskPressure" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.916200 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.916219 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.916230 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:12Z","lastTransitionTime":"2026-01-21T17:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:12 crc kubenswrapper[4792]: E0121 17:57:12.931124 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:12Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.935135 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.935214 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.935228 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.935252 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.935548 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:12Z","lastTransitionTime":"2026-01-21T17:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:12 crc kubenswrapper[4792]: E0121 17:57:12.950223 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:12Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.953954 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.953992 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.954002 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.954016 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.954026 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:12Z","lastTransitionTime":"2026-01-21T17:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:12 crc kubenswrapper[4792]: E0121 17:57:12.967366 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:12Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:12 crc kubenswrapper[4792]: E0121 17:57:12.967523 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.969180 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.969251 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.969264 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.969292 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:12 crc kubenswrapper[4792]: I0121 17:57:12.969307 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:12Z","lastTransitionTime":"2026-01-21T17:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.073214 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.073285 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.073295 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.073332 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.073350 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:13Z","lastTransitionTime":"2026-01-21T17:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.176536 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.176591 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.176603 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.176621 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.176632 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:13Z","lastTransitionTime":"2026-01-21T17:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.246727 4792 scope.go:117] "RemoveContainer" containerID="22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2" Jan 21 17:57:13 crc kubenswrapper[4792]: E0121 17:57:13.247014 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-8t4xq_openshift-ovn-kubernetes(10549a02-b482-4bc0-a770-65dbb57f340a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.249558 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 05:33:04.442027192 +0000 UTC Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.279516 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.279562 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.279572 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.279588 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.279597 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:13Z","lastTransitionTime":"2026-01-21T17:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.381754 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.381802 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.381813 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.381834 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.381859 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:13Z","lastTransitionTime":"2026-01-21T17:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.485421 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.485475 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.485488 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.485511 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.485527 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:13Z","lastTransitionTime":"2026-01-21T17:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.589051 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.589096 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.589107 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.589127 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.589138 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:13Z","lastTransitionTime":"2026-01-21T17:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.691606 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.691648 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.691659 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.691679 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.691690 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:13Z","lastTransitionTime":"2026-01-21T17:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.794260 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.794323 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.794336 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.794355 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.794365 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:13Z","lastTransitionTime":"2026-01-21T17:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.897367 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.897516 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.897539 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.897564 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:13 crc kubenswrapper[4792]: I0121 17:57:13.897579 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:13Z","lastTransitionTime":"2026-01-21T17:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.000714 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.000790 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.000799 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.000818 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.000829 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:14Z","lastTransitionTime":"2026-01-21T17:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.103602 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.103636 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.103644 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.103657 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.103667 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:14Z","lastTransitionTime":"2026-01-21T17:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.206834 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.206892 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.206904 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.206922 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.206932 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:14Z","lastTransitionTime":"2026-01-21T17:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.247179 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.247319 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.247203 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:14 crc kubenswrapper[4792]: E0121 17:57:14.247377 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:14 crc kubenswrapper[4792]: E0121 17:57:14.247482 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.247554 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:14 crc kubenswrapper[4792]: E0121 17:57:14.247627 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:14 crc kubenswrapper[4792]: E0121 17:57:14.247692 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.249698 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 15:19:34.905549188 +0000 UTC Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.309667 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.309735 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.309749 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.309779 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.309793 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:14Z","lastTransitionTime":"2026-01-21T17:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.413066 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.413105 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.413116 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.413133 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.413145 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:14Z","lastTransitionTime":"2026-01-21T17:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.517005 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.517098 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.517111 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.517133 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.517152 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:14Z","lastTransitionTime":"2026-01-21T17:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.620396 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.620462 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.620481 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.620505 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.620520 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:14Z","lastTransitionTime":"2026-01-21T17:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.723378 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.723426 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.723435 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.723449 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.723458 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:14Z","lastTransitionTime":"2026-01-21T17:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.825744 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.825792 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.825808 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.825825 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.825835 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:14Z","lastTransitionTime":"2026-01-21T17:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.928717 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.928767 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.928778 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.928798 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:14 crc kubenswrapper[4792]: I0121 17:57:14.928810 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:14Z","lastTransitionTime":"2026-01-21T17:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.000425 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs\") pod \"network-metrics-daemon-ddsqn\" (UID: \"97b1a1f0-3533-44d9-8c10-9feb31d988ea\") " pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:15 crc kubenswrapper[4792]: E0121 17:57:15.000627 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:57:15 crc kubenswrapper[4792]: E0121 17:57:15.000724 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs podName:97b1a1f0-3533-44d9-8c10-9feb31d988ea nodeName:}" failed. No retries permitted until 2026-01-21 17:57:47.000702901 +0000 UTC m=+100.982666147 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs") pod "network-metrics-daemon-ddsqn" (UID: "97b1a1f0-3533-44d9-8c10-9feb31d988ea") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.032022 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.032089 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.032102 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.032121 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.032134 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:15Z","lastTransitionTime":"2026-01-21T17:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.135017 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.135071 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.135085 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.135105 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.135119 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:15Z","lastTransitionTime":"2026-01-21T17:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.238199 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.238250 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.238263 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.238285 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.238300 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:15Z","lastTransitionTime":"2026-01-21T17:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.250729 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 03:07:07.747118094 +0000 UTC
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.340440 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.340488 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.340502 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.340520 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.340534 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:15Z","lastTransitionTime":"2026-01-21T17:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.443127 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.443169 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.443183 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.443198 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.443208 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:15Z","lastTransitionTime":"2026-01-21T17:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.545732 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.545767 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.545775 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.545788 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.545797 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:15Z","lastTransitionTime":"2026-01-21T17:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.648838 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.648931 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.648946 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.648972 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.649003 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:15Z","lastTransitionTime":"2026-01-21T17:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.752217 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.752262 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.752281 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.752297 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.752308 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:15Z","lastTransitionTime":"2026-01-21T17:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.855004 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.855039 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.855050 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.855067 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.855077 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:15Z","lastTransitionTime":"2026-01-21T17:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.957539 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.957607 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.957618 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.957634 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:15 crc kubenswrapper[4792]: I0121 17:57:15.957689 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:15Z","lastTransitionTime":"2026-01-21T17:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.060389 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.060440 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.060454 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.060474 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.060488 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:16Z","lastTransitionTime":"2026-01-21T17:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.163056 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.163105 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.163120 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.163138 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.163148 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:16Z","lastTransitionTime":"2026-01-21T17:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.246480 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.246541 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.246539 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:57:16 crc kubenswrapper[4792]: E0121 17:57:16.246624 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.246663 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn"
Jan 21 17:57:16 crc kubenswrapper[4792]: E0121 17:57:16.246732 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 21 17:57:16 crc kubenswrapper[4792]: E0121 17:57:16.246827 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
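The bursts above repeat the same five node-status events roughly every 100 ms for as long as the Ready condition stays False, so the log grows rapidly even though nothing about the node has changed. A minimal sketch for measuring that cadence from the log text; the path ./kubelet.log is a placeholder, and one log entry per line is assumed:

    #!/usr/bin/env python3
    # Sketch: measure how often the kubelet re-records "Node became not ready".
    # Assumes the log is available at ./kubelet.log (placeholder path) with
    # one entry per line; timestamps are klog-style "I0121 17:57:15.340534".
    import re
    from datetime import datetime

    PAT = re.compile(r'I\d{4} (\d{2}:\d{2}:\d{2}\.\d{6}) +\d+ setters\.go:\d+\] "Node became not ready"')

    stamps = []
    with open("kubelet.log", encoding="utf-8", errors="replace") as f:
        for line in f:
            m = PAT.search(line)
            if m:
                stamps.append(datetime.strptime(m.group(1), "%H:%M:%S.%f"))

    gaps = [(b - a).total_seconds() for a, b in zip(stamps, stamps[1:])]
    if gaps:
        print(f"{len(stamps)} NotReady updates, median gap "
              f"{sorted(gaps)[len(gaps) // 2]:.3f}s")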
pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:16 crc kubenswrapper[4792]: E0121 17:57:16.246913 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.251183 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 08:30:08.623323326 +0000 UTC Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.259242 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.265790 4792 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.265832 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.265876 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.265901 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.265911 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:16Z","lastTransitionTime":"2026-01-21T17:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.273241 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.301377 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22501bc264639a10b65231a9565f871529f350ff
7835b5b7675d712f93e29fc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:57:00Z\\\",\\\"message\\\":\\\"bm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true service.alpha.openshift.io/serving-cert-secret-name:catalog-operator-serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0075c7db7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https-metrics,Protocol:TCP,Port:8443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: catalog-operator,},ClusterIP:10.217.5.204,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF0121 17:56:59.930646 6406 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-8t4xq_openshift-ovn-kubernetes(10549a02-b482-4bc0-a770-65dbb57f340a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.313627 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.326126 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.338328 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.350741 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4a2d61b-a0b0-4073-bd76-665e9fa19250\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a32de4f393c30583281a4431630dbe23e1420386fbdeed8737595b78d464845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36f12319da77e736cc4fd4803c0186c2821e25764a9b8bbb41e137b95b6251f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vrcps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:16Z is after 2025-08-24T17:21:41Z" Jan 21 
17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.361485 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd97b948-4785-419e-b471-aac8172716f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a73d7382542136d91922475711138b5a59633f510c9beadd57bba84cf27db54e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41cdac15a17aeca1efb61e6fb234ab59861bb3b692cc028f5d305c23eaad7366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98f09db41085faefd387a464b2e702916f6ee1850251810834bffc896ec4479e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1b47455f06c5a6fb55c0ba54c00c3113575c9aceec13f75c0b763b6a8677f06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1b47455f06c5a6fb55c0ba54c00c3113575c9aceec13f75c0b763b6a8677f06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.370421 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.370464 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.370477 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.370494 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.370506 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:16Z","lastTransitionTime":"2026-01-21T17:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.372614 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.384699 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:16Z is after 2025-08-24T17:21:41Z"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.399338 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:16Z is after 2025-08-24T17:21:41Z"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.414862 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:16Z is after 2025-08-24T17:21:41Z"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.425525 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97b1a1f0-3533-44d9-8c10-9feb31d988ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ddsqn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:16Z is after 2025-08-24T17:21:41Z"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.446222 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:16Z is after 2025-08-24T17:21:41Z"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.461581 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:16Z is after 2025-08-24T17:21:41Z"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.472868 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.472905 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.472916 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.472935 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.472946 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:16Z","lastTransitionTime":"2026-01-21T17:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.475959 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:16Z is after 2025-08-24T17:21:41Z"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.489016 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:16Z is after 2025-08-24T17:21:41Z"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.504181 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:16Z is after 2025-08-24T17:21:41Z"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.575681 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.575723 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.575732 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.575748 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.575759 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:16Z","lastTransitionTime":"2026-01-21T17:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.678163 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.678198 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.678207 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.678221 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.678230 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:16Z","lastTransitionTime":"2026-01-21T17:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.780151 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.780195 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.780204 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.780219 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.780229 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:16Z","lastTransitionTime":"2026-01-21T17:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.883584 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.883624 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.883638 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.883654 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.883667 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:16Z","lastTransitionTime":"2026-01-21T17:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.986326 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.986390 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.986400 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.986418 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:16 crc kubenswrapper[4792]: I0121 17:57:16.986429 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:16Z","lastTransitionTime":"2026-01-21T17:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.089705 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.089757 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.089773 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.089790 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.089801 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:17Z","lastTransitionTime":"2026-01-21T17:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.191912 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.191955 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.191964 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.191979 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.191990 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:17Z","lastTransitionTime":"2026-01-21T17:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.251721 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 11:00:06.525831137 +0000 UTC
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.293876 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.293919 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.293931 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.293949 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.293961 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:17Z","lastTransitionTime":"2026-01-21T17:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.396984 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.397036 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.397046 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.397062 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.397074 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:17Z","lastTransitionTime":"2026-01-21T17:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.499056 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.499089 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.499100 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.499115 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.499124 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:17Z","lastTransitionTime":"2026-01-21T17:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.601309 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.601337 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.601344 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.601359 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.601368 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:17Z","lastTransitionTime":"2026-01-21T17:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.703814 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.703874 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.703887 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.703945 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.703961 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:17Z","lastTransitionTime":"2026-01-21T17:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.807251 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.807281 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.807291 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.807306 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.807316 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:17Z","lastTransitionTime":"2026-01-21T17:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.910707 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.910730 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.910738 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.910750 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:17 crc kubenswrapper[4792]: I0121 17:57:17.910759 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:17Z","lastTransitionTime":"2026-01-21T17:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.013501 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.013534 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.013542 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.013555 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.013564 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:18Z","lastTransitionTime":"2026-01-21T17:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.116055 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.116091 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.116103 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.116119 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.116131 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:18Z","lastTransitionTime":"2026-01-21T17:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.218964 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.219006 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.219015 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.219031 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Has your network provider started?"} Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.245926 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.245924 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.246118 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.246143 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:18 crc kubenswrapper[4792]: E0121 17:57:18.246041 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:18 crc kubenswrapper[4792]: E0121 17:57:18.246249 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:18 crc kubenswrapper[4792]: E0121 17:57:18.246318 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:18 crc kubenswrapper[4792]: E0121 17:57:18.246385 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.252163 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 15:02:41.536578045 +0000 UTC Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.321379 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.321495 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.321511 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.322017 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.322091 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:18Z","lastTransitionTime":"2026-01-21T17:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.424956 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.424992 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.425000 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.425013 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.425021 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:18Z","lastTransitionTime":"2026-01-21T17:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.527554 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.527606 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.527620 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.527637 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.527648 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:18Z","lastTransitionTime":"2026-01-21T17:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.630200 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.630244 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.630255 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.630270 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.630282 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:18Z","lastTransitionTime":"2026-01-21T17:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.732205 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.732236 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.732246 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.732259 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.732267 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:18Z","lastTransitionTime":"2026-01-21T17:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.776884 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tvdgr_129c7cf1-6a9e-440a-8d4e-049c0652cf6e/kube-multus/0.log" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.776933 4792 generic.go:334] "Generic (PLEG): container finished" podID="129c7cf1-6a9e-440a-8d4e-049c0652cf6e" containerID="89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc" exitCode=1 Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.776965 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-tvdgr" event={"ID":"129c7cf1-6a9e-440a-8d4e-049c0652cf6e","Type":"ContainerDied","Data":"89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc"} Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.777375 4792 scope.go:117] "RemoveContainer" containerID="89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.796027 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-21T17:57:18Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.812974 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:18Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.835164 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/va
r/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3e
f3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:57:00Z\\\",\\\"message\\\":\\\"bm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true service.alpha.openshift.io/serving-cert-secret-name:catalog-operator-serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0075c7db7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https-metrics,Protocol:TCP,Port:8443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: catalog-operator,},ClusterIP:10.217.5.204,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF0121 17:56:59.930646 6406 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-8t4xq_openshift-ovn-kubernetes(10549a02-b482-4bc0-a770-65dbb57f340a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:18Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.835479 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.835525 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.835535 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.835551 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.835562 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:18Z","lastTransitionTime":"2026-01-21T17:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.851293 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:18Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.867314 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:18Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.879492 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:18Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.899346 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4a2d61b-a0b0-4073-bd76-665e9fa19250\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a32de4f393c30583281a4431630dbe23e1420386fbdeed8737595b78d464845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36f12319da77e736cc4fd4803c0186c2821e25764a9b8bbb41e137b95b6251f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:41Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vrcps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:18Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.912587 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd97b948-4785-419e-b471-aac8172716f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a73d7382542136d91922475711138b5a59633f510c9beadd57bba84cf27db54e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41cdac15a17aeca1efb61e6fb234ab59861bb3b692cc028f5d305c23eaad7366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98f09db41085faefd387a464b2e702916f6ee1850251810834bffc896ec4479e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-contr
oller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1b47455f06c5a6fb55c0ba54c00c3113575c9aceec13f75c0b763b6a8677f06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1b47455f06c5a6fb55c0ba54c00c3113575c9aceec13f75c0b763b6a8677f06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:18Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.927129 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:18Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.938216 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.938254 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.938265 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.938280 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.938291 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:18Z","lastTransitionTime":"2026-01-21T17:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.943678 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:18Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.958111 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:18Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.972151 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:57:18Z\\\",\\\"message\\\":\\\"2026-01-21T17:56:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3a3e61e1-36be-4ea1-be4b-a687e86434bb\\\\n2026-01-21T17:56:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3a3e61e1-36be-4ea1-be4b-a687e86434bb to /host/opt/cni/bin/\\\\n2026-01-21T17:56:33Z [verbose] multus-daemon started\\\\n2026-01-21T17:56:33Z [verbose] Readiness Indicator file check\\\\n2026-01-21T17:57:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:18Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:18 crc kubenswrapper[4792]: I0121 17:57:18.982905 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97b1a1f0-3533-44d9-8c10-9feb31d988ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ddsqn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2026-01-21T17:57:18Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.002750 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeM
ounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\
"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.017095 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.034594 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.041516 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.041578 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.041591 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.041615 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.041630 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:19Z","lastTransitionTime":"2026-01-21T17:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.047985 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.062868 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.144220 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.144254 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.144263 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.144276 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.144286 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:19Z","lastTransitionTime":"2026-01-21T17:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.246742 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.246991 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.247083 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.247167 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.247296 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:19Z","lastTransitionTime":"2026-01-21T17:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.252989 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 12:59:03.59588506 +0000 UTC Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.349493 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.349820 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.349945 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.350025 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.350094 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:19Z","lastTransitionTime":"2026-01-21T17:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.452774 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.452817 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.452829 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.452866 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.452881 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:19Z","lastTransitionTime":"2026-01-21T17:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.555229 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.555272 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.555301 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.555346 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.555357 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:19Z","lastTransitionTime":"2026-01-21T17:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.657991 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.658037 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.658049 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.658064 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.658076 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:19Z","lastTransitionTime":"2026-01-21T17:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.760533 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.760569 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.760578 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.760592 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.760602 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:19Z","lastTransitionTime":"2026-01-21T17:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.783252 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tvdgr_129c7cf1-6a9e-440a-8d4e-049c0652cf6e/kube-multus/0.log" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.783316 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-tvdgr" event={"ID":"129c7cf1-6a9e-440a-8d4e-049c0652cf6e","Type":"ContainerStarted","Data":"8e20f1abd9dbc3d916becd3d87decba7d7ee0645031748385d7ab8eeb1b5ae74"} Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.799640 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-
binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7a
a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for 
pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.821748 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshi
ft-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5
646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.837054 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.851895 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.863365 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.863401 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.863410 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.863424 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.863434 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:19Z","lastTransitionTime":"2026-01-21T17:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.865789 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.878087 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.894241 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.915539 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics
-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:57:00Z\\\",\\\"message\\\":\\\"bm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true service.alpha.openshift.io/serving-cert-secret-name:catalog-operator-serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0075c7db7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https-metrics,Protocol:TCP,Port:8443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: catalog-operator,},ClusterIP:10.217.5.204,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF0121 17:56:59.930646 6406 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-8t4xq_openshift-ovn-kubernetes(10549a02-b482-4bc0-a770-65dbb57f340a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.928967 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.942562 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.953803 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.966367 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.966409 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.966422 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.966437 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.966450 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:19Z","lastTransitionTime":"2026-01-21T17:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.968347 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4a2d61b-a0b0-4073-bd76-665e9fa19250\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a32de4f393c30583281a4431630dbe23e1420386fbdeed8737595b78d464845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36f12319da77e736cc4fd4803c0186c2821e25764a9b8bbb41e137b
95b6251f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vrcps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.984364 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e20f1abd9dbc3d916becd3d87decba7d7ee0645031748385d7ab8eeb1b5ae74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:57:18Z\\\",\\\"message\\\":\\\"2026-01-21T17:56:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3a3e61e1-36be-4ea1-be4b-a687e86434bb\\\\n2026-01-21T17:56:33+00:00 [cnibincopy] Successfully moved files in 
/host/opt/cni/bin/upgrade_3a3e61e1-36be-4ea1-be4b-a687e86434bb to /host/opt/cni/bin/\\\\n2026-01-21T17:56:33Z [verbose] multus-daemon started\\\\n2026-01-21T17:56:33Z [verbose] Readiness Indicator file check\\\\n2026-01-21T17:57:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:57:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:19 crc kubenswrapper[4792]: I0121 17:57:19.998396 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97b1a1f0-3533-44d9-8c10-9feb31d988ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ddsqn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.014182 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd97b948-4785-419e-b471-aac8172716f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a73d7382542136d91922475711138b5a59633f510c9beadd57bba84cf27db54e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41cdac15a17aeca1efb61e6fb234ab59861bb3b692cc028f5d305c23eaad7366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98f09db41085faefd387a464b2e702916f6ee1850251810834bffc896ec4479e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1b47455f06c5a6fb55c0ba54c00c3113575c9aceec13f75c0b763b6a8677f06\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1b47455f06c5a6fb55c0ba54c00c3113575c9aceec13f75c0b763b6a8677f06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:20Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.033015 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:20Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.050528 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:20Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.069096 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:20Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.069541 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.069582 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.069590 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.069603 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.069613 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:20Z","lastTransitionTime":"2026-01-21T17:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.171835 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.171916 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.171947 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.171961 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.171971 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:20Z","lastTransitionTime":"2026-01-21T17:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.245795 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.245898 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:20 crc kubenswrapper[4792]: E0121 17:57:20.245962 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.245797 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:20 crc kubenswrapper[4792]: E0121 17:57:20.246057 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.246099 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:20 crc kubenswrapper[4792]: E0121 17:57:20.246180 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:20 crc kubenswrapper[4792]: E0121 17:57:20.246261 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.254040 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 16:13:36.153261131 +0000 UTC Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.274138 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.274183 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.274195 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.274215 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.274231 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:20Z","lastTransitionTime":"2026-01-21T17:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.377134 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.377174 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.377185 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.377202 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.377213 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:20Z","lastTransitionTime":"2026-01-21T17:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.479315 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.479350 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.479358 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.479398 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.479409 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:20Z","lastTransitionTime":"2026-01-21T17:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.582110 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.582144 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.582152 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.582165 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.582174 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:20Z","lastTransitionTime":"2026-01-21T17:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.684874 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.684914 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.684923 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.684939 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.684950 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:20Z","lastTransitionTime":"2026-01-21T17:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.787138 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.787172 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.787183 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.787196 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.787206 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:20Z","lastTransitionTime":"2026-01-21T17:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.889744 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.889790 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.889803 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.889818 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.889830 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:20Z","lastTransitionTime":"2026-01-21T17:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.992170 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.992232 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.992242 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.992255 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:20 crc kubenswrapper[4792]: I0121 17:57:20.992263 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:20Z","lastTransitionTime":"2026-01-21T17:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.094957 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.094991 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.095001 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.095017 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.095032 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:21Z","lastTransitionTime":"2026-01-21T17:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.197672 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.197754 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.197764 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.197779 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.197788 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:21Z","lastTransitionTime":"2026-01-21T17:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.254687 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 04:15:58.524189347 +0000 UTC Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.299972 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.300012 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.300023 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.300065 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.300079 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:21Z","lastTransitionTime":"2026-01-21T17:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.402153 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.402197 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.402209 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.402227 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.402241 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:21Z","lastTransitionTime":"2026-01-21T17:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.504768 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.504802 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.504810 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.504823 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.504832 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:21Z","lastTransitionTime":"2026-01-21T17:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.607382 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.607424 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.607437 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.607454 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.607465 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:21Z","lastTransitionTime":"2026-01-21T17:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.710755 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.710798 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.710813 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.710830 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.710842 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:21Z","lastTransitionTime":"2026-01-21T17:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.813095 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.813136 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.813146 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.813158 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.813167 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:21Z","lastTransitionTime":"2026-01-21T17:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.915751 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.915779 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.915788 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.915801 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:21 crc kubenswrapper[4792]: I0121 17:57:21.915811 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:21Z","lastTransitionTime":"2026-01-21T17:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.018154 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.018195 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.018205 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.018219 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.018231 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:22Z","lastTransitionTime":"2026-01-21T17:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.121033 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.121097 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.121113 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.121129 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.121140 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:22Z","lastTransitionTime":"2026-01-21T17:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.225712 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.225755 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.225768 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.225787 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.225797 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:22Z","lastTransitionTime":"2026-01-21T17:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.246261 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.246276 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.246890 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.246914 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:22 crc kubenswrapper[4792]: E0121 17:57:22.247008 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:22 crc kubenswrapper[4792]: E0121 17:57:22.247111 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:22 crc kubenswrapper[4792]: E0121 17:57:22.247234 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:22 crc kubenswrapper[4792]: E0121 17:57:22.247417 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.255417 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 21:29:16.530923586 +0000 UTC Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.262170 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.328675 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.328724 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.328733 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.328749 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.328759 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:22Z","lastTransitionTime":"2026-01-21T17:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.430902 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.430954 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.430966 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.431001 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.431015 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:22Z","lastTransitionTime":"2026-01-21T17:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.534424 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.534466 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.534479 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.534499 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.534513 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:22Z","lastTransitionTime":"2026-01-21T17:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.637544 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.637586 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.637594 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.637608 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.637617 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:22Z","lastTransitionTime":"2026-01-21T17:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.740108 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.740168 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.740181 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.740201 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.740221 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:22Z","lastTransitionTime":"2026-01-21T17:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.843342 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.843392 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.843403 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.843420 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.843431 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:22Z","lastTransitionTime":"2026-01-21T17:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.946395 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.946440 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.946451 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.946466 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:22 crc kubenswrapper[4792]: I0121 17:57:22.946478 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:22Z","lastTransitionTime":"2026-01-21T17:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.049250 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.049296 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.049307 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.049324 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.049335 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:23Z","lastTransitionTime":"2026-01-21T17:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.151585 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.151635 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.151647 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.151667 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.151678 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:23Z","lastTransitionTime":"2026-01-21T17:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.253925 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.253973 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.253986 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.254002 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.254015 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:23Z","lastTransitionTime":"2026-01-21T17:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
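Every record above traces back to one condition: kubelet holds the node NotReady because no CNI configuration file exists in /etc/kubernetes/cni/net.d/. A minimal spot-check one might run on the node, assuming shell access and that crictl and oc are installed (usually true on an OpenShift/CRI-O node, but treat both as assumptions here):

    # Does any CNI conflist exist where kubelet is looking? (path taken from the log)
    ls -l /etc/kubernetes/cni/net.d/
    # CRI-O's own view of the NetworkReady condition reported in the records above
    crictl info | grep -B1 -A3 NetworkReady
    # Is the network provider (multus on OpenShift) actually running?
    oc -n openshift-multus get pods -o wide

An empty directory means the network operator has not written a config yet, which matches the "Has your network provider started?" hint repeated in the messages.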
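Separately, the node-status patch failures recorded below do not fail at networking at all: they fail at webhook admission, because the node.network-node-identity.openshift.io webhook serves an x509 certificate valid only until 2025-08-24T17:21:41Z while the node clock reads 2026-01-21. A quick way to confirm the expired serving certificate from the node, assuming openssl is available (the endpoint 127.0.0.1:9743 is the one named in the errors):

    # Print the validity window (notBefore/notAfter) of the webhook's serving certificate
    openssl s_client -connect 127.0.0.1:9743 </dev/null 2>/dev/null | openssl x509 -noout -dates

Until that certificate is rotated, every status patch is rejected the same way, which is why the identical error repeats in the records that follow.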
Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.256137 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 18:23:53.081553388 +0000 UTC
Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.317584 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.318049 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.318264 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.318367 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.318442 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:23Z","lastTransitionTime":"2026-01-21T17:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:23 crc kubenswrapper[4792]: E0121 17:57:23.332151 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:23Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.336054 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.336271 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.336359 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.336449 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.336530 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:23Z","lastTransitionTime":"2026-01-21T17:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:23 crc kubenswrapper[4792]: E0121 17:57:23.348656 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:23Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.351879 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.351904 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.351912 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.351939 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.351951 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:23Z","lastTransitionTime":"2026-01-21T17:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:23 crc kubenswrapper[4792]: E0121 17:57:23.364309 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:23Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.368315 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.368347 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.368360 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.368375 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.368385 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:23Z","lastTransitionTime":"2026-01-21T17:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:23 crc kubenswrapper[4792]: E0121 17:57:23.382206 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:23Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.386473 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.386512 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.386522 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.386536 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.386554 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:23Z","lastTransitionTime":"2026-01-21T17:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:23 crc kubenswrapper[4792]: E0121 17:57:23.400736 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:23Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:23 crc kubenswrapper[4792]: E0121 17:57:23.400892 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.402468 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
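Every status-update attempt above dies on the same TLS handshake: the network-node-identity webhook at https://127.0.0.1:9743 presents a serving certificate whose notAfter of 2025-08-24T17:21:41Z is long past the node's current clock of 2026-01-21. Below is a minimal Go sketch of the same validity check, runnable on the node itself; the address comes from the log, and InsecureSkipVerify is used only so the handshake completes and the expired certificate can be inspected (this is a diagnostic illustration, not kubelet code).

// certprobe.go: connect to the webhook endpoint and report the serving
// certificate's validity window, mirroring the x509 check that fails above.
package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// InsecureSkipVerify lets the handshake complete so we can inspect an
	// expired certificate; the normal verifier rejects it, as kubelet did.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	now := time.Now()
	fmt.Printf("subject:   %s\n", cert.Subject)
	fmt.Printf("notBefore: %s\n", cert.NotBefore)
	fmt.Printf("notAfter:  %s\n", cert.NotAfter)
	if now.After(cert.NotAfter) {
		// Matches the kubelet error: "certificate has expired or is not
		// yet valid: current time ... is after ..."
		fmt.Printf("EXPIRED: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	}
}

The retries themselves cannot succeed: the fix lies with the webhook's certificate, not with anything kubelet does on its side.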
event="NodeHasSufficientMemory" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.402560 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.402625 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.402685 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.402742 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:23Z","lastTransitionTime":"2026-01-21T17:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.504586 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.504616 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.504624 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.504636 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.504645 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:23Z","lastTransitionTime":"2026-01-21T17:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.607253 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.607309 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.607321 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.607380 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.607395 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:23Z","lastTransitionTime":"2026-01-21T17:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.710963 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.711007 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.711024 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.711041 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.711051 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:23Z","lastTransitionTime":"2026-01-21T17:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.813788 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.813839 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.813872 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.813887 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.813905 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:23Z","lastTransitionTime":"2026-01-21T17:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.917774 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.917843 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.917883 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.917905 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:23 crc kubenswrapper[4792]: I0121 17:57:23.917920 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:23Z","lastTransitionTime":"2026-01-21T17:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.021186 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.021236 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.021252 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.021273 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.021290 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:24Z","lastTransitionTime":"2026-01-21T17:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.124527 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.124973 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.125058 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.125164 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.125291 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:24Z","lastTransitionTime":"2026-01-21T17:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.228554 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.228582 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.228590 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.228602 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.228615 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:24Z","lastTransitionTime":"2026-01-21T17:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.246416 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.246487 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:24 crc kubenswrapper[4792]: E0121 17:57:24.246580 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.246487 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:24 crc kubenswrapper[4792]: E0121 17:57:24.246761 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:24 crc kubenswrapper[4792]: E0121 17:57:24.246934 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.247033 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:24 crc kubenswrapper[4792]: E0121 17:57:24.247178 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
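These pod syncs are collateral damage: kubelet refuses to create sandboxes while the runtime reports NetworkReady=false, and the runtime reports that because nothing has written a CNI config into /etc/kubernetes/cni/net.d/ yet (on OpenShift that is the network operator's job, itself blocked by the webhook failure above). A rough Go approximation of the readiness test follows, assuming the conventional libcni rule that any *.conf, *.conflist, or *.json file in the directory counts as a network configuration; it is a sketch, not the actual CRI code path.

// cnicheck.go: approximate the readiness probe behind the
// "no CNI configuration file in /etc/kubernetes/cni/net.d/" message.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	const confDir = "/etc/kubernetes/cni/net.d/" // directory named in the log
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Println("cannot read CNI conf dir:", err)
		return
	}
	var confs []string
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // extensions libcni conventionally scans for
			confs = append(confs, e.Name())
		}
	}
	if len(confs) == 0 {
		fmt.Println("no CNI configuration file found; network plugin not ready")
		return
	}
	fmt.Println("CNI configurations:", confs)
}

Run on a healthy node it lists the plugin's config (for ovn-kubernetes, typically a single .conflist); here it would report the directory empty, matching every NotReady condition in this log.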
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.256408 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 09:31:30.003820293 +0000 UTC Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.332153 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.332192 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.332200 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.332215 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.332233 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:24Z","lastTransitionTime":"2026-01-21T17:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.435377 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.435409 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.435418 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.435432 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.435440 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:24Z","lastTransitionTime":"2026-01-21T17:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.537679 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.538004 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.538134 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.538219 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.538305 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:24Z","lastTransitionTime":"2026-01-21T17:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.640313 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.640391 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.640407 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.640430 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.640442 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:24Z","lastTransitionTime":"2026-01-21T17:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.743147 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.743184 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.743194 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.743208 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.743217 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:24Z","lastTransitionTime":"2026-01-21T17:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.846151 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.846193 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.846202 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.846218 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.846228 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:24Z","lastTransitionTime":"2026-01-21T17:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.948743 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.948791 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.948800 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.948822 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:24 crc kubenswrapper[4792]: I0121 17:57:24.948833 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:24Z","lastTransitionTime":"2026-01-21T17:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.051943 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.051992 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.052003 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.052021 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.052031 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:25Z","lastTransitionTime":"2026-01-21T17:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.154427 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.154469 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.154483 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.154499 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.154511 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:25Z","lastTransitionTime":"2026-01-21T17:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.256967 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 10:39:12.339062942 +0000 UTC
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.257108 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.257152 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.257163 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.257182 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.257192 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:25Z","lastTransitionTime":"2026-01-21T17:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.359932 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.359995 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.360027 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.360044 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.360055 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:25Z","lastTransitionTime":"2026-01-21T17:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.462492 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.462538 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.462548 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.462566 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.462578 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:25Z","lastTransitionTime":"2026-01-21T17:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.564724 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.564754 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.564763 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.564778 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.564788 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:25Z","lastTransitionTime":"2026-01-21T17:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.667296 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.667365 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.667378 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.667406 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.667420 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:25Z","lastTransitionTime":"2026-01-21T17:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.770310 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.770372 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.770387 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.770412 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.770465 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:25Z","lastTransitionTime":"2026-01-21T17:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.872998 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.873035 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.873045 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.873058 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.873075 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:25Z","lastTransitionTime":"2026-01-21T17:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.975829 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.975898 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.975910 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.975925 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:25 crc kubenswrapper[4792]: I0121 17:57:25.975935 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:25Z","lastTransitionTime":"2026-01-21T17:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.078702 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.078752 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.078763 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.078780 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.078793 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:26Z","lastTransitionTime":"2026-01-21T17:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.180970 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.181017 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.181029 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.181045 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.181055 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:26Z","lastTransitionTime":"2026-01-21T17:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.246030 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn"
Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.246355 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:57:26 crc kubenswrapper[4792]: E0121 17:57:26.246462 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.246195 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:57:26 crc kubenswrapper[4792]: E0121 17:57:26.246693 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea"
Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.246168 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:57:26 crc kubenswrapper[4792]: E0121 17:57:26.246905 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 21 17:57:26 crc kubenswrapper[4792]: E0121 17:57:26.246762 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.257648 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 04:14:26.440952739 +0000 UTC Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.260496 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b7f70249-6f22-4823-87dd-89a44246ea51\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d9f88d458e735cbdddcc588fc99a287f622bc6910b99568b03ede890ba58de2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7a7c52189641ff112ef87110b27b8c28706567e55e5f2089d42c95fec647b2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a7c52189641ff112ef87110b27b8c28706567e55e5f2089d42c95fec647b2a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.272064 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.283318 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.283556 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.283628 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.283693 4792 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeNotReady" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.283755 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:26Z","lastTransitionTime":"2026-01-21T17:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.283232 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disab
led\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.302728 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22501bc264639a10b65231a9565f871529f350ff
7835b5b7675d712f93e29fc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:57:00Z\\\",\\\"message\\\":\\\"bm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true service.alpha.openshift.io/serving-cert-secret-name:catalog-operator-serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0075c7db7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https-metrics,Protocol:TCP,Port:8443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: catalog-operator,},ClusterIP:10.217.5.204,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF0121 17:56:59.930646 6406 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-8t4xq_openshift-ovn-kubernetes(10549a02-b482-4bc0-a770-65dbb57f340a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.318186 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.331891 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.345414 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.359756 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4a2d61b-a0b0-4073-bd76-665e9fa19250\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a32de4f393c30583281a4431630dbe23e1420386fbdeed8737595b78d464845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36f12319da77e736cc4fd4803c0186c2821e25764a9b8bbb41e137b95b6251f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vrcps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:26Z is after 2025-08-24T17:21:41Z" Jan 21 
17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.370529 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd97b948-4785-419e-b471-aac8172716f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a73d7382542136d91922475711138b5a59633f510c9beadd57bba84cf27db54e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41cdac15a17aeca1efb61e6fb234ab59861bb3b692cc028f5d305c23eaad7366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98f09db41085faefd387a464b2e702916f6ee1850251810834bffc896ec4479e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1b47455f06c5a6fb55c0ba54c00c3113575c9aceec13f75c0b763b6a8677f06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1b47455f06c5a6fb55c0ba54c00c3113575c9aceec13f75c0b763b6a8677f06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.382908 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:26Z is after 2025-08-24T17:21:41Z"
Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.386343 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.386368 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.386376 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.386388 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.386398 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:26Z","lastTransitionTime":"2026-01-21T17:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.395529 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.406872 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.419390 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e20f1abd9dbc3d916becd3d87decba7d7ee0645031748385d7ab8eeb1b5ae74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:57:18Z\\\",\\\"message\\\":\\\"2026-01-21T17:56:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3a3e61e1-36be-4ea1-be4b-a687e86434bb\\\\n2026-01-21T17:56:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3a3e61e1-36be-4ea1-be4b-a687e86434bb to /host/opt/cni/bin/\\\\n2026-01-21T17:56:33Z [verbose] multus-daemon started\\\\n2026-01-21T17:56:33Z [verbose] Readiness Indicator file check\\\\n2026-01-21T17:57:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:57:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.431743 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97b1a1f0-3533-44d9-8c10-9feb31d988ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ddsqn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.453053 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f
Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.467489 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:26Z is after 2025-08-24T17:21:41Z"
Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.481286 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:26Z is after 2025-08-24T17:21:41Z"
ook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.488435 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.488474 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.488487 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.488503 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.488515 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:26Z","lastTransitionTime":"2026-01-21T17:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.493440 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.509665 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.590912 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.590971 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.590985 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.591011 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.591028 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:26Z","lastTransitionTime":"2026-01-21T17:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.694167 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.694209 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.694221 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.694244 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.694257 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:26Z","lastTransitionTime":"2026-01-21T17:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.796595 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.796636 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.796645 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.796660 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.796669 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:26Z","lastTransitionTime":"2026-01-21T17:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.900049 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.900126 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.900137 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.900156 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:26 crc kubenswrapper[4792]: I0121 17:57:26.900169 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:26Z","lastTransitionTime":"2026-01-21T17:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.003135 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.003176 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.003188 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.003206 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.003217 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:27Z","lastTransitionTime":"2026-01-21T17:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.105990 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.106060 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.106079 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.106107 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.106124 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:27Z","lastTransitionTime":"2026-01-21T17:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.208931 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.208980 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.208991 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.209005 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.209015 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:27Z","lastTransitionTime":"2026-01-21T17:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.258696 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 18:37:27.528180042 +0000 UTC Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.311785 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.311821 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.311830 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.311860 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.311872 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:27Z","lastTransitionTime":"2026-01-21T17:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.414232 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.414279 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.414288 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.414302 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.414312 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:27Z","lastTransitionTime":"2026-01-21T17:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.516550 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.516612 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.516628 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.516651 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.516670 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:27Z","lastTransitionTime":"2026-01-21T17:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.620365 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.620467 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.620495 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.620542 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.620570 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:27Z","lastTransitionTime":"2026-01-21T17:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.724815 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.724940 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.724959 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.724989 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.725007 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:27Z","lastTransitionTime":"2026-01-21T17:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.829129 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.829231 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.829250 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.829278 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.829297 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:27Z","lastTransitionTime":"2026-01-21T17:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.935716 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.935777 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.935807 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.935832 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:27 crc kubenswrapper[4792]: I0121 17:57:27.935872 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:27Z","lastTransitionTime":"2026-01-21T17:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.039020 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.039066 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.039074 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.039088 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.039098 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:28Z","lastTransitionTime":"2026-01-21T17:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.142054 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.142144 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.142166 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.142200 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.142222 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:28Z","lastTransitionTime":"2026-01-21T17:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.245315 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.245389 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.245523 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.245555 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.245569 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:28Z","lastTransitionTime":"2026-01-21T17:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.245953 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.246028 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:28 crc kubenswrapper[4792]: E0121 17:57:28.246220 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.246389 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:28 crc kubenswrapper[4792]: E0121 17:57:28.246378 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.246465 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:28 crc kubenswrapper[4792]: E0121 17:57:28.246598 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:28 crc kubenswrapper[4792]: E0121 17:57:28.246740 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.247956 4792 scope.go:117] "RemoveContainer" containerID="22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.259814 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 15:05:03.734879229 +0000 UTC Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.349502 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.349587 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.349612 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.349649 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.349676 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:28Z","lastTransitionTime":"2026-01-21T17:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.452928 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.452994 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.453016 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.453047 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.453068 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:28Z","lastTransitionTime":"2026-01-21T17:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.557151 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.557595 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.557662 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.557733 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.557809 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:28Z","lastTransitionTime":"2026-01-21T17:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.661495 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.661577 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.661600 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.661634 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.661654 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:28Z","lastTransitionTime":"2026-01-21T17:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.765083 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.765157 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.765172 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.765196 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.765211 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:28Z","lastTransitionTime":"2026-01-21T17:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.868442 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.868497 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.868508 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.868525 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.868536 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:28Z","lastTransitionTime":"2026-01-21T17:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.972770 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.972865 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.972882 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.972913 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:28 crc kubenswrapper[4792]: I0121 17:57:28.972928 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:28Z","lastTransitionTime":"2026-01-21T17:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.076603 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.076653 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.076689 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.076707 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.076718 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:29Z","lastTransitionTime":"2026-01-21T17:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.180016 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.180078 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.180092 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.180116 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.180131 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:29Z","lastTransitionTime":"2026-01-21T17:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.260021 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.260117 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.260182 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:29 crc kubenswrapper[4792]: E0121 17:57:29.260296 4792 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:57:29 crc kubenswrapper[4792]: E0121 17:57:29.260318 4792 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:57:29 crc kubenswrapper[4792]: E0121 17:57:29.260342 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:33.260298125 +0000 UTC m=+147.242261311 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:57:29 crc kubenswrapper[4792]: E0121 17:57:29.260482 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:58:33.26047343 +0000 UTC m=+147.242436606 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:57:29 crc kubenswrapper[4792]: E0121 17:57:29.260503 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:58:33.26049267 +0000 UTC m=+147.242455856 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.260351 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 12:00:38.961094646 +0000 UTC Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.283641 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.283702 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.283718 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.283744 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.283762 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:29Z","lastTransitionTime":"2026-01-21T17:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.360928 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.360995 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:29 crc kubenswrapper[4792]: E0121 17:57:29.361189 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:57:29 crc kubenswrapper[4792]: E0121 17:57:29.361189 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:57:29 crc kubenswrapper[4792]: E0121 17:57:29.361213 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:57:29 crc kubenswrapper[4792]: E0121 17:57:29.361229 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:57:29 crc kubenswrapper[4792]: E0121 17:57:29.361235 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:57:29 crc kubenswrapper[4792]: E0121 17:57:29.361264 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:57:29 crc kubenswrapper[4792]: E0121 17:57:29.361319 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-21 17:58:33.361295611 +0000 UTC m=+147.343258797 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:57:29 crc kubenswrapper[4792]: E0121 17:57:29.361341 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-21 17:58:33.361333912 +0000 UTC m=+147.343297098 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.388270 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.388317 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.388331 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.388351 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.388367 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:29Z","lastTransitionTime":"2026-01-21T17:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.491793 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.492226 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.492339 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.492500 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.492605 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:29Z","lastTransitionTime":"2026-01-21T17:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.595318 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.595369 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.595383 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.595403 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.595415 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:29Z","lastTransitionTime":"2026-01-21T17:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.698388 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.698447 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.698459 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.698485 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.698500 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:29Z","lastTransitionTime":"2026-01-21T17:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.801582 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.801648 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.801662 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.801689 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.801708 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:29Z","lastTransitionTime":"2026-01-21T17:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.820425 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8t4xq_10549a02-b482-4bc0-a770-65dbb57f340a/ovnkube-controller/2.log" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.823206 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerStarted","Data":"5754c1d5d813fa2c06641540f162fab4e392271c5f7e4ff6406221b0a107bea3"} Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.823829 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.841799 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.857836 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.874774 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4a2d61b-a0b0-4073-bd76-665e9fa19250\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a32de4f393c30583281a4431630dbe23e1420386fbdeed8737595b78d464845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36f12319da77e736cc4fd4803c0186c2821e25764a9b8bbb41e137b95b6251f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:41Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vrcps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.889992 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"l
astState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.904450 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.904506 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.904515 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.904536 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.904546 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:29Z","lastTransitionTime":"2026-01-21T17:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.906733 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.924376 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.937154 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.959829 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e20f1abd9dbc3d916becd3d87decba7d7ee0645031748385d7ab8eeb1b5ae74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:57:18Z\\\",\\\"message\\\":\\\"2026-01-21T17:56:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3a3e61e1-36be-4ea1-be4b-a687e86434bb\\\\n2026-01-21T17:56:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3a3e61e1-36be-4ea1-be4b-a687e86434bb to /host/opt/cni/bin/\\\\n2026-01-21T17:56:33Z [verbose] multus-daemon started\\\\n2026-01-21T17:56:33Z [verbose] Readiness Indicator file check\\\\n2026-01-21T17:57:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:57:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.973282 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97b1a1f0-3533-44d9-8c10-9feb31d988ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ddsqn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:29 crc kubenswrapper[4792]: I0121 17:57:29.986245 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd97b948-4785-419e-b471-aac8172716f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a73d7382542136d91922475711138b5a59633f510c9beadd57bba84cf27db54e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41cdac15a17aeca1efb61e6fb234ab59861bb3b692cc028f5d305c23eaad7366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98f09db41085faefd387a464b2e702916f6ee1850251810834bffc896ec4479e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1b47455f06c5a6fb55c0ba54c00c3113575c9aceec13f75c0b763b6a8677f06\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1b47455f06c5a6fb55c0ba54c00c3113575c9aceec13f75c0b763b6a8677f06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.002377 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operato
r@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-0
1-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.010135 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.010174 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.010183 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.010201 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.010212 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:30Z","lastTransitionTime":"2026-01-21T17:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.017837 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.031915 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.047882 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.068943 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f
3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.081239 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\
\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.093966 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb
6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.113243 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.113633 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.113215 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5754c1d5d813fa2c06641540f162fab4e392271c
5f7e4ff6406221b0a107bea3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:57:00Z\\\",\\\"message\\\":\\\"bm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true service.alpha.openshift.io/serving-cert-secret-name:catalog-operator-serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0075c7db7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https-metrics,Protocol:TCP,Port:8443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: catalog-operator,},ClusterIP:10.217.5.204,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF0121 17:56:59.930646 6406 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:57:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.113723 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.113944 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.113960 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:30Z","lastTransitionTime":"2026-01-21T17:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.124299 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b7f70249-6f22-4823-87dd-89a44246ea51\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d9f88d458e735cbdddcc588fc99a287f622bc6910b99568b03ede890ba58de2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7a7c52189641ff112ef87110b27b8c28706567e55e5f2089d42c95fec647b2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a7c52189641ff112ef87110b27b8c28706567e55e5f2089d42c95fec647b2a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.216550 4792 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.216599 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.216616 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.216633 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.216647 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:30Z","lastTransitionTime":"2026-01-21T17:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.246230 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.246331 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:30 crc kubenswrapper[4792]: E0121 17:57:30.246393 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.246346 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:30 crc kubenswrapper[4792]: E0121 17:57:30.246535 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.246603 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:30 crc kubenswrapper[4792]: E0121 17:57:30.246694 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:30 crc kubenswrapper[4792]: E0121 17:57:30.246792 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.261151 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 07:15:35.757238516 +0000 UTC Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.319725 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.319784 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.319796 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.319818 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.319833 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:30Z","lastTransitionTime":"2026-01-21T17:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.423276 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.423333 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.423352 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.423377 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.423393 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:30Z","lastTransitionTime":"2026-01-21T17:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.526725 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.526794 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.526809 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.526835 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.526873 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:30Z","lastTransitionTime":"2026-01-21T17:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.630307 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.630380 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.630393 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.630422 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.630438 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:30Z","lastTransitionTime":"2026-01-21T17:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.733697 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.733740 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.733752 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.733773 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.733786 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:30Z","lastTransitionTime":"2026-01-21T17:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.828872 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8t4xq_10549a02-b482-4bc0-a770-65dbb57f340a/ovnkube-controller/3.log" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.829458 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8t4xq_10549a02-b482-4bc0-a770-65dbb57f340a/ovnkube-controller/2.log" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.832542 4792 generic.go:334] "Generic (PLEG): container finished" podID="10549a02-b482-4bc0-a770-65dbb57f340a" containerID="5754c1d5d813fa2c06641540f162fab4e392271c5f7e4ff6406221b0a107bea3" exitCode=1 Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.832608 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerDied","Data":"5754c1d5d813fa2c06641540f162fab4e392271c5f7e4ff6406221b0a107bea3"} Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.832644 4792 scope.go:117] "RemoveContainer" containerID="22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.836906 4792 scope.go:117] "RemoveContainer" containerID="5754c1d5d813fa2c06641540f162fab4e392271c5f7e4ff6406221b0a107bea3" Jan 21 17:57:30 crc kubenswrapper[4792]: E0121 17:57:30.837191 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-8t4xq_openshift-ovn-kubernetes(10549a02-b482-4bc0-a770-65dbb57f340a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.838231 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.838278 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.838288 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.838307 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.838316 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:30Z","lastTransitionTime":"2026-01-21T17:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.849896 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b7f70249-6f22-4823-87dd-89a44246ea51\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d9f88d458e735cbdddcc588fc99a287f622bc6910b99568b03ede890ba58de2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7a7c52189641ff112ef87110b27b8c28706567e55e5f2089d42c95fec647b2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a7c52189641ff112ef87110b27b8c28706567e55e5f2089d42c95fec647b2a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.860683 4792 
status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.873289 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.896988 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5754c1d5d813fa2c06641540f162fab4e392271c5f7e4ff6406221b0a107bea3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22501bc264639a10b65231a9565f871529f350ff7835b5b7675d712f93e29fc2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:57:00Z\\\",\\\"message\\\":\\\"bm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true service.alpha.openshift.io/serving-cert-secret-name:catalog-operator-serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0075c7db7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https-metrics,Protocol:TCP,Port:8443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: catalog-operator,},ClusterIP:10.217.5.204,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF0121 17:56:59.930646 6406 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5754c1d5d813fa2c06641540f162fab4e392271c5f7e4ff6406221b0a107bea3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:57:30Z\\\",\\\"message\\\":\\\"Sending *v1.Node event handler 7 for removal\\\\nI0121 17:57:29.595243 6851 
handler.go:208] Removed *v1.Node event handler 7\\\\nI0121 17:57:29.595308 6851 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0121 17:57:29.595349 6851 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0121 17:57:29.595384 6851 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0121 17:57:29.595332 6851 handler.go:208] Removed *v1.Node event handler 2\\\\nI0121 17:57:29.595449 6851 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0121 17:57:29.595460 6851 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0121 17:57:29.595473 6851 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0121 17:57:29.596030 6851 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0121 17:57:29.595519 6851 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0121 17:57:29.596033 6851 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0121 17:57:29.595988 6851 factory.go:656] Stopping watch factory\\\\nI0121 17:57:29.596179 6851 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0121 17:57:29.596239 6851 ovnkube.go:599] Stopped ovnkube\\\\nI0121 17:57:29.596279 6851 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0121 17:57:29.596375 6851 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:57:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.913929 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.930036 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.941115 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.941184 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.941194 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.941217 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.941227 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:30Z","lastTransitionTime":"2026-01-21T17:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.943247 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.956707 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4a2d61b-a0b0-4073-bd76-665e9fa19250\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a32de4f393c30583281a4431630dbe23e1420386fbdeed8737595b78d464845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36f12319da77e736cc4fd4803c0186c2821e25764a9b8bbb41e137b95b6251f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vrcps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:30Z is after 2025-08-24T17:21:41Z" Jan 21 
17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.971444 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd97b948-4785-419e-b471-aac8172716f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a73d7382542136d91922475711138b5a59633f510c9beadd57bba84cf27db54e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41cdac15a17aeca1efb61e6fb234ab59861bb3b692cc028f5d305c23eaad7366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98f09db41085faefd387a464b2e702916f6ee1850251810834bffc896ec4479e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1b47455f06c5a6fb55c0ba54c00c3113575c9aceec13f75c0b763b6a8677f06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1b47455f06c5a6fb55c0ba54c00c3113575c9aceec13f75c0b763b6a8677f06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:30 crc kubenswrapper[4792]: I0121 17:57:30.988971 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.006588 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.023792 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.039388 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e20f1abd9dbc3d916becd3d87decba7d7ee0645031748385d7ab8eeb1b5ae74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:57:18Z\\\",\\\"message\\\":\\\"2026-01-21T17:56:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3a3e61e1-36be-4ea1-be4b-a687e86434bb\\\\n2026-01-21T17:56:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3a3e61e1-36be-4ea1-be4b-a687e86434bb to /host/opt/cni/bin/\\\\n2026-01-21T17:56:33Z [verbose] multus-daemon started\\\\n2026-01-21T17:56:33Z [verbose] Readiness Indicator file check\\\\n2026-01-21T17:57:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:57:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.043891 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.044072 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.044174 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.044251 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.044318 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:31Z","lastTransitionTime":"2026-01-21T17:57:31Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.053610 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97b1a1f0-3533-44d9-8c10-9feb31d988ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ddsqn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.076964 4792 
status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:
56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.098197 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.118914 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.134044 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.146757 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.146805 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.146815 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.146832 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.146863 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:31Z","lastTransitionTime":"2026-01-21T17:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.150415 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.249788 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.249821 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.249829 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.249841 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.249872 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:31Z","lastTransitionTime":"2026-01-21T17:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.262269 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 10:18:35.461271932 +0000 UTC Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.352604 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.352668 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.352681 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.352704 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.352719 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:31Z","lastTransitionTime":"2026-01-21T17:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.455313 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.455355 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.455365 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.455378 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.455387 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:31Z","lastTransitionTime":"2026-01-21T17:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.558002 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.558053 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.558066 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.558087 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.558102 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:31Z","lastTransitionTime":"2026-01-21T17:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.661809 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.661936 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.661965 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.661999 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.662024 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:31Z","lastTransitionTime":"2026-01-21T17:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.765337 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.765384 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.765395 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.765416 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.765428 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:31Z","lastTransitionTime":"2026-01-21T17:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.838871 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8t4xq_10549a02-b482-4bc0-a770-65dbb57f340a/ovnkube-controller/3.log" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.843040 4792 scope.go:117] "RemoveContainer" containerID="5754c1d5d813fa2c06641540f162fab4e392271c5f7e4ff6406221b0a107bea3" Jan 21 17:57:31 crc kubenswrapper[4792]: E0121 17:57:31.843215 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-8t4xq_openshift-ovn-kubernetes(10549a02-b482-4bc0-a770-65dbb57f340a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.856629 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b7f70249-6f22-4823-87dd-89a44246ea51\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d9f88d458e735cbdddcc588fc99a287f622bc6910b99568b03ede890ba58de2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7a7c52189641ff112ef87110b27b8c28706567e55e5f2089d42c95fec647b2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a7c52189641ff112ef87110b27b8c28706567e55e5f2089d42c95fec647b2a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.868827 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.868920 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.868961 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.868990 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.869007 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:31Z","lastTransitionTime":"2026-01-21T17:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.870660 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:31 crc 
kubenswrapper[4792]: I0121 17:57:31.887608 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-21T17:57:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.910064 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257
453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5754c1d5d813fa2c06641540f162fab4e392271c5f7e4ff6406221b0a107bea3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5754c1d5d813fa2c06641540f162fab4e392271c5f7e4ff6406221b0a107bea3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:57:30Z\\\",\\\"message\\\":\\\"Sending *v1.Node event handler 7 for removal\\\\nI0121 17:57:29.595243 6851 handler.go:208] Removed *v1.Node event handler 7\\\\nI0121 17:57:29.595308 6851 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0121 17:57:29.595349 6851 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0121 17:57:29.595384 6851 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0121 17:57:29.595332 6851 handler.go:208] Removed *v1.Node event handler 2\\\\nI0121 17:57:29.595449 6851 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0121 17:57:29.595460 6851 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0121 17:57:29.595473 6851 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0121 17:57:29.596030 6851 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0121 17:57:29.595519 6851 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0121 17:57:29.596033 6851 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0121 17:57:29.595988 6851 factory.go:656] Stopping watch factory\\\\nI0121 17:57:29.596179 6851 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0121 17:57:29.596239 6851 ovnkube.go:599] Stopped ovnkube\\\\nI0121 17:57:29.596279 6851 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0121 17:57:29.596375 6851 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:57:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed 
container=ovnkube-controller pod=ovnkube-node-8t4xq_openshift-ovn-kubernetes(10549a02-b482-4bc0-a770-65dbb57f340a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.929902 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.954141 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.969926 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.972503 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.972537 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.972551 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.972574 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.972590 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:31Z","lastTransitionTime":"2026-01-21T17:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.983489 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4a2d61b-a0b0-4073-bd76-665e9fa19250\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a32de4f393c30583281a4431630dbe23e1420386fbdeed8737595b78d464845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36f12319da77e736cc4fd4803c0186c2821e25764a9b8bbb41e137b95b6251f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:41Z\\\"}}\" 
for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vrcps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:31 crc kubenswrapper[4792]: I0121 17:57:31.999610 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd97b948-4785-419e-b471-aac8172716f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a73d7382542136d91922475711138b5a59633f510c9beadd57bba84cf27db54e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41cdac15a17aeca1efb61e6fb234ab59861bb3b692cc028f5d305c23eaad7366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98f09db41085faefd387a464b2e702916f6ee1850251810834bffc896ec4479e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recove
ry-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1b47455f06c5a6fb55c0ba54c00c3113575c9aceec13f75c0b763b6a8677f06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1b47455f06c5a6fb55c0ba54c00c3113575c9aceec13f75c0b763b6a8677f06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.013784 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.028979 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.041943 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.054234 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e20f1abd9dbc3d916becd3d87decba7d7ee0645031748385d7ab8eeb1b5ae74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:57:18Z\\\",\\\"message\\\":\\\"2026-01-21T17:56:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3a3e61e1-36be-4ea1-be4b-a687e86434bb\\\\n2026-01-21T17:56:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3a3e61e1-36be-4ea1-be4b-a687e86434bb to /host/opt/cni/bin/\\\\n2026-01-21T17:56:33Z [verbose] multus-daemon started\\\\n2026-01-21T17:56:33Z [verbose] Readiness Indicator file check\\\\n2026-01-21T17:57:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:57:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.064717 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97b1a1f0-3533-44d9-8c10-9feb31d988ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ddsqn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.075137 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.075205 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.075217 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.075239 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.075251 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:32Z","lastTransitionTime":"2026-01-21T17:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.086352 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.103981 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.119965 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.133055 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.148198 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.178403 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.178472 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:32 crc 
kubenswrapper[4792]: I0121 17:57:32.178490 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.178521 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.178541 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:32Z","lastTransitionTime":"2026-01-21T17:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.246725 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:32 crc kubenswrapper[4792]: E0121 17:57:32.247076 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.247114 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.247257 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:32 crc kubenswrapper[4792]: E0121 17:57:32.247389 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.247529 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:32 crc kubenswrapper[4792]: E0121 17:57:32.247693 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:32 crc kubenswrapper[4792]: E0121 17:57:32.247834 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.263134 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 12:41:24.028979033 +0000 UTC Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.312938 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.312985 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.312996 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.313011 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.313020 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:32Z","lastTransitionTime":"2026-01-21T17:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.414913 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.414974 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.414984 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.415000 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.415012 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:32Z","lastTransitionTime":"2026-01-21T17:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.518091 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.518176 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.518199 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.518233 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.518261 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:32Z","lastTransitionTime":"2026-01-21T17:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.620755 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.620827 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.620837 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.620878 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.620890 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:32Z","lastTransitionTime":"2026-01-21T17:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.723803 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.723889 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.723901 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.723921 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.723931 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:32Z","lastTransitionTime":"2026-01-21T17:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.827084 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.827143 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.827163 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.827190 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.827210 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:32Z","lastTransitionTime":"2026-01-21T17:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.930174 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.930218 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.930228 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.930246 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:32 crc kubenswrapper[4792]: I0121 17:57:32.930257 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:32Z","lastTransitionTime":"2026-01-21T17:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.032788 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.032904 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.032924 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.032944 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.032956 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:33Z","lastTransitionTime":"2026-01-21T17:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.135126 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.135161 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.135170 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.135183 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.135191 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:33Z","lastTransitionTime":"2026-01-21T17:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.237764 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.237835 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.237895 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.237927 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.237945 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:33Z","lastTransitionTime":"2026-01-21T17:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.264119 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 23:49:31.79254795 +0000 UTC
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.340514 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.340561 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.340572 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.340588 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.340601 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:33Z","lastTransitionTime":"2026-01-21T17:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.444679 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.444742 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.444752 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.444776 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.444789 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:33Z","lastTransitionTime":"2026-01-21T17:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.547691 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.547769 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.547783 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.547806 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.547817 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:33Z","lastTransitionTime":"2026-01-21T17:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.613421 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.613707 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.613788 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.613932 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.614030 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:33Z","lastTransitionTime":"2026-01-21T17:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:33 crc kubenswrapper[4792]: E0121 17:57:33.627321 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.631769 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.631795 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.631803 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.631818 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.631829 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:33Z","lastTransitionTime":"2026-01-21T17:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:33 crc kubenswrapper[4792]: E0121 17:57:33.644609 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.648814 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.648844 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.648869 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.648889 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.648906 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:33Z","lastTransitionTime":"2026-01-21T17:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:33 crc kubenswrapper[4792]: E0121 17:57:33.662251 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.666704 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.666748 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.666760 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.666775 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.666787 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:33Z","lastTransitionTime":"2026-01-21T17:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:33 crc kubenswrapper[4792]: E0121 17:57:33.680916 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.686727 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.686781 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.686794 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.686812 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.686825 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:33Z","lastTransitionTime":"2026-01-21T17:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:33 crc kubenswrapper[4792]: E0121 17:57:33.703533 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:33 crc kubenswrapper[4792]: E0121 17:57:33.703765 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.706221 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.706258 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.706269 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.706291 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.706304 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:33Z","lastTransitionTime":"2026-01-21T17:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.809777 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.810225 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.810331 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.810429 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.810519 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:33Z","lastTransitionTime":"2026-01-21T17:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.915000 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.915133 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.915183 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.915211 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:33 crc kubenswrapper[4792]: I0121 17:57:33.915229 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:33Z","lastTransitionTime":"2026-01-21T17:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.018879 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.018919 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.018930 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.018949 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.018962 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:34Z","lastTransitionTime":"2026-01-21T17:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.122512 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.122596 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.122613 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.122641 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.122658 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:34Z","lastTransitionTime":"2026-01-21T17:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.229756 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.229821 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.229835 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.229876 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.229899 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:34Z","lastTransitionTime":"2026-01-21T17:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.246618 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.246721 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.246618 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:34 crc kubenswrapper[4792]: E0121 17:57:34.246813 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.246661 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:34 crc kubenswrapper[4792]: E0121 17:57:34.246956 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:34 crc kubenswrapper[4792]: E0121 17:57:34.247065 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:34 crc kubenswrapper[4792]: E0121 17:57:34.247173 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.264444 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 20:19:57.988562354 +0000 UTC Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.333247 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.333316 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.333338 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.333365 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.333390 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:34Z","lastTransitionTime":"2026-01-21T17:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.437113 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.437169 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.437179 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.437201 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.437215 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:34Z","lastTransitionTime":"2026-01-21T17:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.540430 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.540738 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.540802 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.540898 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.541011 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:34Z","lastTransitionTime":"2026-01-21T17:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.644455 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.644525 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.644542 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.644563 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.644576 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:34Z","lastTransitionTime":"2026-01-21T17:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.746752 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.746822 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.746834 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.746908 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.746924 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:34Z","lastTransitionTime":"2026-01-21T17:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.849722 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.849770 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.849782 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.849806 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.849819 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:34Z","lastTransitionTime":"2026-01-21T17:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.953786 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.953859 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.953872 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.953896 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:34 crc kubenswrapper[4792]: I0121 17:57:34.953913 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:34Z","lastTransitionTime":"2026-01-21T17:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.057021 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.057089 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.057101 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.057150 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.057163 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:35Z","lastTransitionTime":"2026-01-21T17:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.159759 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.159817 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.159829 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.159863 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.159878 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:35Z","lastTransitionTime":"2026-01-21T17:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.262414 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.262456 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.262465 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.262479 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.262488 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:35Z","lastTransitionTime":"2026-01-21T17:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.264576 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 22:52:27.673202259 +0000 UTC Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.365650 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.365715 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.365727 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.365747 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.365760 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:35Z","lastTransitionTime":"2026-01-21T17:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.469176 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.469212 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.469221 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.469251 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.469261 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:35Z","lastTransitionTime":"2026-01-21T17:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.572470 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.573519 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.573594 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.573916 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.573985 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:35Z","lastTransitionTime":"2026-01-21T17:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.676260 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.676299 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.676309 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.676325 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.676336 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:35Z","lastTransitionTime":"2026-01-21T17:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.779457 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.779521 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.779533 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.779551 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.779563 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:35Z","lastTransitionTime":"2026-01-21T17:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.882400 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.882458 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.882475 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.882499 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.882516 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:35Z","lastTransitionTime":"2026-01-21T17:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.984583 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.984655 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.984664 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.984699 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:35 crc kubenswrapper[4792]: I0121 17:57:35.984713 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:35Z","lastTransitionTime":"2026-01-21T17:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.088017 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.088058 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.088071 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.088096 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.088111 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:36Z","lastTransitionTime":"2026-01-21T17:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.192047 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.192105 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.192118 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.192140 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.192153 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:36Z","lastTransitionTime":"2026-01-21T17:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.245813 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.245928 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.245876 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:36 crc kubenswrapper[4792]: E0121 17:57:36.245980 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:36 crc kubenswrapper[4792]: E0121 17:57:36.246077 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:36 crc kubenswrapper[4792]: E0121 17:57:36.246189 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.246369 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:36 crc kubenswrapper[4792]: E0121 17:57:36.246598 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.259182 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b7f70249-6f22-4823-87dd-89a44246ea51\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d9f88d458e735cbdddcc588fc99a287f622bc6910b99568b03ede890ba58de2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7a7c52189641ff112ef87110b27b8c28706567e55e5f2089d42c95fec647b2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a7c52189641ff112ef87110b27b8c28706567e55e5f2089d42c95fec647b2a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.269438 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 15:22:16.099904923 +0000 UTC Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.273448 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.286568 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.295036 4792 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.295077 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.295090 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.295109 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.295124 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:36Z","lastTransitionTime":"2026-01-21T17:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.307706 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5754c1d5d813fa2c06641540f162fab4e392271c
5f7e4ff6406221b0a107bea3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5754c1d5d813fa2c06641540f162fab4e392271c5f7e4ff6406221b0a107bea3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:57:30Z\\\",\\\"message\\\":\\\"Sending *v1.Node event handler 7 for removal\\\\nI0121 17:57:29.595243 6851 handler.go:208] Removed *v1.Node event handler 7\\\\nI0121 17:57:29.595308 6851 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0121 17:57:29.595349 6851 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0121 17:57:29.595384 6851 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0121 17:57:29.595332 6851 handler.go:208] Removed *v1.Node event handler 2\\\\nI0121 17:57:29.595449 6851 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0121 17:57:29.595460 6851 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0121 17:57:29.595473 6851 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0121 17:57:29.596030 6851 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0121 17:57:29.595519 6851 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0121 17:57:29.596033 6851 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0121 17:57:29.595988 6851 factory.go:656] Stopping watch factory\\\\nI0121 17:57:29.596179 6851 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0121 17:57:29.596239 6851 ovnkube.go:599] Stopped ovnkube\\\\nI0121 17:57:29.596279 6851 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0121 17:57:29.596375 6851 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:57:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-8t4xq_openshift-ovn-kubernetes(10549a02-b482-4bc0-a770-65dbb57f340a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.324875 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.340969 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.359332 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.373569 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4a2d61b-a0b0-4073-bd76-665e9fa19250\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a32de4f393c30583281a4431630dbe23e1420386fbdeed8737595b78d464845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36f12319da77e736cc4fd4803c0186c2821e25764a9b8bbb41e137b95b6251f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vrcps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:36Z is after 2025-08-24T17:21:41Z" Jan 21 
17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.387693 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd97b948-4785-419e-b471-aac8172716f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a73d7382542136d91922475711138b5a59633f510c9beadd57bba84cf27db54e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41cdac15a17aeca1efb61e6fb234ab59861bb3b692cc028f5d305c23eaad7366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98f09db41085faefd387a464b2e702916f6ee1850251810834bffc896ec4479e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1b47455f06c5a6fb55c0ba54c00c3113575c9aceec13f75c0b763b6a8677f06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1b47455f06c5a6fb55c0ba54c00c3113575c9aceec13f75c0b763b6a8677f06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.397359 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.397409 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.397423 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.397443 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.397457 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:36Z","lastTransitionTime":"2026-01-21T17:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.402771 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.419103 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.433250 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.447885 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e20f1abd9dbc3d916becd3d87decba7d7ee0645031748385d7ab8eeb1b5ae74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:57:18Z\\\",\\\"message\\\":\\\"2026-01-21T17:56:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3a3e61e1-36be-4ea1-be4b-a687e86434bb\\\\n2026-01-21T17:56:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3a3e61e1-36be-4ea1-be4b-a687e86434bb to /host/opt/cni/bin/\\\\n2026-01-21T17:56:33Z [verbose] multus-daemon started\\\\n2026-01-21T17:56:33Z [verbose] Readiness Indicator file check\\\\n2026-01-21T17:57:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:57:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.463473 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97b1a1f0-3533-44d9-8c10-9feb31d988ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ddsqn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.485210 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f
3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.501898 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.502027 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.502050 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.502080 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.502095 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:36Z","lastTransitionTime":"2026-01-21T17:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.502473 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.523342 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.540392 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.558180 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.605170 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.605203 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:36 crc 
kubenswrapper[4792]: I0121 17:57:36.605231 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.605245 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.605257 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:36Z","lastTransitionTime":"2026-01-21T17:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.707741 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.707772 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.707781 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.707795 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.707805 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:36Z","lastTransitionTime":"2026-01-21T17:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.810178 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.810223 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.810233 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.810248 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.810262 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:36Z","lastTransitionTime":"2026-01-21T17:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.913298 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.913364 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.913378 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.913402 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:36 crc kubenswrapper[4792]: I0121 17:57:36.913420 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:36Z","lastTransitionTime":"2026-01-21T17:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.016613 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.017159 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.017233 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.017260 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.017278 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:37Z","lastTransitionTime":"2026-01-21T17:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.119923 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.119984 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.119997 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.120021 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.120037 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:37Z","lastTransitionTime":"2026-01-21T17:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.224144 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.224188 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.224198 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.224216 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.224229 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:37Z","lastTransitionTime":"2026-01-21T17:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.269917 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 02:55:33.744005505 +0000 UTC Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.327323 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.327362 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.327370 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.327387 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.327398 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:37Z","lastTransitionTime":"2026-01-21T17:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.431097 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.431158 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.431172 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.431194 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.431209 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:37Z","lastTransitionTime":"2026-01-21T17:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.534227 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.534265 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.534293 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.534310 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.534319 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:37Z","lastTransitionTime":"2026-01-21T17:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.636998 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.637072 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.637084 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.637118 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.637132 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:37Z","lastTransitionTime":"2026-01-21T17:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.740595 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.740654 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.740666 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.740684 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.740695 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:37Z","lastTransitionTime":"2026-01-21T17:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.842730 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.842799 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.842811 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.842868 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.842896 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:37Z","lastTransitionTime":"2026-01-21T17:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.946959 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.947030 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.947039 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.947054 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:37 crc kubenswrapper[4792]: I0121 17:57:37.947062 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:37Z","lastTransitionTime":"2026-01-21T17:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.050603 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.050642 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.050650 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.050663 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.050673 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:38Z","lastTransitionTime":"2026-01-21T17:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.153638 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.153726 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.153741 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.153766 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.153786 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:38Z","lastTransitionTime":"2026-01-21T17:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.246314 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.246375 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.246497 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:38 crc kubenswrapper[4792]: E0121 17:57:38.246655 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.246675 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:38 crc kubenswrapper[4792]: E0121 17:57:38.246806 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:38 crc kubenswrapper[4792]: E0121 17:57:38.247267 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:38 crc kubenswrapper[4792]: E0121 17:57:38.247162 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.256014 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.256070 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.256082 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.256103 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.256117 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:38Z","lastTransitionTime":"2026-01-21T17:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.274620 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 12:18:37.194224904 +0000 UTC Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.358348 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.358411 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.358425 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.358441 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.358453 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:38Z","lastTransitionTime":"2026-01-21T17:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.460224 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.460259 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.460268 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.460281 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.460290 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:38Z","lastTransitionTime":"2026-01-21T17:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.562573 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.562610 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.562619 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.562634 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.562643 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:38Z","lastTransitionTime":"2026-01-21T17:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.664917 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.664966 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.664977 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.664993 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.665005 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:38Z","lastTransitionTime":"2026-01-21T17:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.767222 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.767284 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.767297 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.767313 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.767324 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:38Z","lastTransitionTime":"2026-01-21T17:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.869934 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.870026 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.870039 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.870063 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.870076 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:38Z","lastTransitionTime":"2026-01-21T17:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.973300 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.973379 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.973394 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.973421 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:38 crc kubenswrapper[4792]: I0121 17:57:38.973436 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:38Z","lastTransitionTime":"2026-01-21T17:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.077192 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.077259 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.077272 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.077297 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.077312 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:39Z","lastTransitionTime":"2026-01-21T17:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.180570 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.180645 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.180667 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.180696 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.180714 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:39Z","lastTransitionTime":"2026-01-21T17:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.276117 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 18:32:32.206845905 +0000 UTC Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.283700 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.283773 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.283797 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.283827 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.283846 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:39Z","lastTransitionTime":"2026-01-21T17:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.387936 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.388002 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.388013 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.388030 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.388039 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:39Z","lastTransitionTime":"2026-01-21T17:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.491561 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.491656 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.491677 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.491702 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.491718 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:39Z","lastTransitionTime":"2026-01-21T17:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.593904 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.594019 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.594042 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.594069 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.594096 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:39Z","lastTransitionTime":"2026-01-21T17:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.698139 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.698231 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.698257 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.698289 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.698313 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:39Z","lastTransitionTime":"2026-01-21T17:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.801062 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.801139 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.801172 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.801203 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.801221 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:39Z","lastTransitionTime":"2026-01-21T17:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.904646 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.904704 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.904715 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.904738 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:39 crc kubenswrapper[4792]: I0121 17:57:39.904755 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:39Z","lastTransitionTime":"2026-01-21T17:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.007622 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.007681 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.007695 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.007714 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.007726 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:40Z","lastTransitionTime":"2026-01-21T17:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.111064 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.111142 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.111155 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.111178 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.111193 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:40Z","lastTransitionTime":"2026-01-21T17:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.213233 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.213287 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.213297 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.213314 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.213325 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:40Z","lastTransitionTime":"2026-01-21T17:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.245661 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.245777 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.245788 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:40 crc kubenswrapper[4792]: E0121 17:57:40.246016 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:40 crc kubenswrapper[4792]: E0121 17:57:40.246392 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:40 crc kubenswrapper[4792]: E0121 17:57:40.246585 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.246704 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:40 crc kubenswrapper[4792]: E0121 17:57:40.246833 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.276800 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 15:48:49.050947004 +0000 UTC Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.316542 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.316605 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.316618 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.316643 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.316659 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:40Z","lastTransitionTime":"2026-01-21T17:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.420048 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.420120 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.420139 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.420168 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.420186 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:40Z","lastTransitionTime":"2026-01-21T17:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.523569 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.523625 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.523639 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.523660 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.523673 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:40Z","lastTransitionTime":"2026-01-21T17:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.626253 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.626328 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.626353 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.626381 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.626403 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:40Z","lastTransitionTime":"2026-01-21T17:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.729998 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.730049 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.730063 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.730083 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.730098 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:40Z","lastTransitionTime":"2026-01-21T17:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.834139 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.834219 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.834237 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.834267 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.834301 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:40Z","lastTransitionTime":"2026-01-21T17:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.938532 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.938604 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.938619 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.938643 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:40 crc kubenswrapper[4792]: I0121 17:57:40.938661 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:40Z","lastTransitionTime":"2026-01-21T17:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.042066 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.042131 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.042143 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.042160 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.042172 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:41Z","lastTransitionTime":"2026-01-21T17:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.144913 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.144961 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.144974 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.145002 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.145015 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:41Z","lastTransitionTime":"2026-01-21T17:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.252824 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.252900 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.252914 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.252935 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.252948 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:41Z","lastTransitionTime":"2026-01-21T17:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.277083 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 05:30:34.541116658 +0000 UTC Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.355500 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.355752 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.355842 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.355948 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.356030 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:41Z","lastTransitionTime":"2026-01-21T17:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.458941 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.459231 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.459318 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.459390 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.459533 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:41Z","lastTransitionTime":"2026-01-21T17:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.562600 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.562652 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.562667 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.562684 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.562712 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:41Z","lastTransitionTime":"2026-01-21T17:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.665209 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.665526 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.665621 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.665719 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.665816 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:41Z","lastTransitionTime":"2026-01-21T17:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.768559 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.768633 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.768653 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.768682 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.768701 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:41Z","lastTransitionTime":"2026-01-21T17:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.872413 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.872493 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.872507 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.872528 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.872541 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:41Z","lastTransitionTime":"2026-01-21T17:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.974581 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.974646 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.974654 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.974670 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:41 crc kubenswrapper[4792]: I0121 17:57:41.974680 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:41Z","lastTransitionTime":"2026-01-21T17:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.077146 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.077186 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.077195 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.077211 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.077221 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:42Z","lastTransitionTime":"2026-01-21T17:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.179466 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.179517 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.179529 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.179544 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.179554 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:42Z","lastTransitionTime":"2026-01-21T17:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.246039 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.246095 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:42 crc kubenswrapper[4792]: E0121 17:57:42.246177 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.246210 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.246271 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:42 crc kubenswrapper[4792]: E0121 17:57:42.246364 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:42 crc kubenswrapper[4792]: E0121 17:57:42.246463 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:42 crc kubenswrapper[4792]: E0121 17:57:42.246576 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.278336 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 20:38:59.74855694 +0000 UTC Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.282668 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.282724 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.282746 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.282774 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.282795 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:42Z","lastTransitionTime":"2026-01-21T17:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.385293 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.385336 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.385346 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.385363 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.385374 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:42Z","lastTransitionTime":"2026-01-21T17:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.489510 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.489546 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.489555 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.489570 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.489579 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:42Z","lastTransitionTime":"2026-01-21T17:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.591954 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.591988 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.591996 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.592010 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.592019 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:42Z","lastTransitionTime":"2026-01-21T17:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.694223 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.694254 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.694264 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.694277 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.694286 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:42Z","lastTransitionTime":"2026-01-21T17:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.797264 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.797302 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.797313 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.797328 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.797338 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:42Z","lastTransitionTime":"2026-01-21T17:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.900113 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.900169 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.900178 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.900197 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:42 crc kubenswrapper[4792]: I0121 17:57:42.900212 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:42Z","lastTransitionTime":"2026-01-21T17:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.003793 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.003842 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.003873 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.003892 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.003902 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:43Z","lastTransitionTime":"2026-01-21T17:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.107796 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.107883 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.107899 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.107917 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.107930 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:43Z","lastTransitionTime":"2026-01-21T17:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.210577 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.210624 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.210637 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.210656 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.210668 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:43Z","lastTransitionTime":"2026-01-21T17:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.248443 4792 scope.go:117] "RemoveContainer" containerID="5754c1d5d813fa2c06641540f162fab4e392271c5f7e4ff6406221b0a107bea3" Jan 21 17:57:43 crc kubenswrapper[4792]: E0121 17:57:43.248803 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-8t4xq_openshift-ovn-kubernetes(10549a02-b482-4bc0-a770-65dbb57f340a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.278517 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 01:36:51.066757173 +0000 UTC Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.313394 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.313459 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.313486 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.313512 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.313527 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:43Z","lastTransitionTime":"2026-01-21T17:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.416408 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.416512 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.416530 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.416551 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.416595 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:43Z","lastTransitionTime":"2026-01-21T17:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.519376 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.519419 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.519428 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.519442 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.519450 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:43Z","lastTransitionTime":"2026-01-21T17:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.621383 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.621480 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.621496 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.621512 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.621525 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:43Z","lastTransitionTime":"2026-01-21T17:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.723518 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.723555 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.723564 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.723578 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.723587 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:43Z","lastTransitionTime":"2026-01-21T17:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.825826 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.825876 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.825886 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.825899 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.826124 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:43Z","lastTransitionTime":"2026-01-21T17:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.928360 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.928394 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.928405 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.928421 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.928431 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:43Z","lastTransitionTime":"2026-01-21T17:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:43 crc kubenswrapper[4792]: E0121 17:57:43.949596 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.953088 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.953164 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.953178 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.953193 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.953202 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:43Z","lastTransitionTime":"2026-01-21T17:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:43 crc kubenswrapper[4792]: E0121 17:57:43.987264 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.992267 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.992313 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
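Every status-patch attempt in this window dies at TLS verification: the node.network-node-identity.openshift.io webhook is serving a certificate that expired on 2025-08-24, while the node clock reads 2026-01-21. The failing check is the standard NotBefore/NotAfter comparison in Go's x509 verifier; a small standalone sketch that reproduces it against a PEM file (the path is hypothetical):

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	// Hypothetical path to the webhook's serving certificate.
	data, err := os.ReadFile("/etc/webhook/serving-cert.pem")
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		log.Fatal("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	now := time.Now().UTC()
	switch {
	case now.Before(cert.NotBefore):
		fmt.Printf("certificate not yet valid: current time %s is before %s\n",
			now.Format(time.RFC3339), cert.NotBefore.Format(time.RFC3339))
	case now.After(cert.NotAfter):
		// This is the case the kubelet log reports.
		fmt.Printf("certificate has expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	default:
		fmt.Println("certificate is within its validity window")
	}
}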
event="NodeHasNoDiskPressure" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.992326 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.992372 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:43 crc kubenswrapper[4792]: I0121 17:57:43.992386 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:43Z","lastTransitionTime":"2026-01-21T17:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:44 crc kubenswrapper[4792]: E0121 17:57:44.010837 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.016614 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.016651 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
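The escaped payload in each failed request is a strategic merge patch against the node's status subresource; the $setElementOrder/conditions directive preserves the ordering of the conditions list. A hedged client-go sketch of the same call shape, assuming a kubeconfig at the default location and showing only the Ready condition rather than the kubelet's full patch:

package main

import (
	"context"
	"log"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/types"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		log.Fatal(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}
	// Strategic merge patch for a single condition; the kubelet sends the
	// full condition set plus allocatable, capacity, and image data.
	patch := []byte(`{"status":{"$setElementOrder/conditions":[{"type":"Ready"}],` +
		`"conditions":[{"type":"Ready","status":"False","reason":"KubeletNotReady"}]}}`)
	// The trailing "status" argument targets the status subresource,
	// which is what the kubelet's node-status updater patches.
	if _, err := client.CoreV1().Nodes().Patch(context.TODO(), "crc",
		types.StrategicMergePatchType, patch, metav1.PatchOptions{}, "status"); err != nil {
		// Against this cluster the call would surface the same
		// "failed calling webhook" internal error seen in the log.
		log.Fatal(err)
	}
}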
event="NodeHasNoDiskPressure" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.016668 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.016682 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.016692 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:44Z","lastTransitionTime":"2026-01-21T17:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:44 crc kubenswrapper[4792]: E0121 17:57:44.031758 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.036221 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.036256 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
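The certificate_manager entry near the top of this window (rotation deadline 2025-12-19, expiration 2026-02-24) explains why rotation keeps being logged: client-go's certificate manager schedules rotation at a jittered point roughly 70-90% of the way through the certificate's lifetime, and once that deadline has passed it retries on every sync. A sketch of that deadline computation, with a hypothetical issue time chosen to match the logged one-year lifetime:

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a jittered point between 70% and 90% of the
// certificate's validity window, as client-go's certificate manager does.
// Sketch only; the real manager also handles missing NotBefore gracefully.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	frac := 0.7 + 0.2*rand.Float64()
	return notBefore.Add(time.Duration(float64(total) * frac))
}

func main() {
	notBefore := time.Date(2025, 2, 24, 5, 53, 3, 0, time.UTC) // hypothetical issue time
	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC)  // matches the logged expiration
	// For this window the deadline lands between 2025-11 and 2026-01,
	// consistent with the logged 2025-12-19 deadline.
	fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter))
}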
event="NodeHasNoDiskPressure" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.036266 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.036280 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.036289 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:44Z","lastTransitionTime":"2026-01-21T17:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:44 crc kubenswrapper[4792]: E0121 17:57:44.052651 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:57:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"24361cbe-9975-4a7d-97f7-cc9f17426792\\\",\\\"systemUUID\\\":\\\"d27e7875-2a6d-4c63-ab2a-7dede22b0172\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:44 crc kubenswrapper[4792]: E0121 17:57:44.052899 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.054639 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.054672 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.054680 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.054696 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.054707 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:44Z","lastTransitionTime":"2026-01-21T17:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.157684 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.157738 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.157747 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.157761 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.157770 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:44Z","lastTransitionTime":"2026-01-21T17:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.246221 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.246255 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.246288 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.246233 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:44 crc kubenswrapper[4792]: E0121 17:57:44.246351 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:44 crc kubenswrapper[4792]: E0121 17:57:44.246497 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:44 crc kubenswrapper[4792]: E0121 17:57:44.246679 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:44 crc kubenswrapper[4792]: E0121 17:57:44.246728 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.259706 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.259741 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.259749 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.259763 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.259771 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:44Z","lastTransitionTime":"2026-01-21T17:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.279120 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 00:22:06.262093408 +0000 UTC Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.363008 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.363069 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.363081 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.363100 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.363114 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:44Z","lastTransitionTime":"2026-01-21T17:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.465469 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.465519 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.465532 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.465552 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.465566 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:44Z","lastTransitionTime":"2026-01-21T17:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.568245 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.568301 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.568314 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.568331 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.568341 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:44Z","lastTransitionTime":"2026-01-21T17:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.671609 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.671662 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.671676 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.671694 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.671706 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:44Z","lastTransitionTime":"2026-01-21T17:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.774894 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.774954 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.774966 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.774988 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.775007 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:44Z","lastTransitionTime":"2026-01-21T17:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.877696 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.877739 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.877749 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.877765 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.877776 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:44Z","lastTransitionTime":"2026-01-21T17:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.980743 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.980798 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.980812 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.980835 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:44 crc kubenswrapper[4792]: I0121 17:57:44.980873 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:44Z","lastTransitionTime":"2026-01-21T17:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.083999 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.084059 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.084073 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.084094 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.084107 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:45Z","lastTransitionTime":"2026-01-21T17:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.187995 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.188070 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.188089 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.188116 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.188133 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:45Z","lastTransitionTime":"2026-01-21T17:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.279256 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 06:46:46.06583045 +0000 UTC Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.291603 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.291646 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.291658 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.291676 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.291689 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:45Z","lastTransitionTime":"2026-01-21T17:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.394690 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.394756 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.394778 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.394812 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.394833 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:45Z","lastTransitionTime":"2026-01-21T17:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.498169 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.498230 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.498247 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.498277 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.498308 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:45Z","lastTransitionTime":"2026-01-21T17:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.602104 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.602163 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.602173 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.602197 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.602211 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:45Z","lastTransitionTime":"2026-01-21T17:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.706116 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.706193 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.706206 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.706224 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.706237 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:45Z","lastTransitionTime":"2026-01-21T17:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.809524 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.809892 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.809974 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.810037 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.810150 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:45Z","lastTransitionTime":"2026-01-21T17:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.913522 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.913561 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.913571 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.913589 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:45 crc kubenswrapper[4792]: I0121 17:57:45.913601 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:45Z","lastTransitionTime":"2026-01-21T17:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.016438 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.016510 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.016525 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.016546 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.016560 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:46Z","lastTransitionTime":"2026-01-21T17:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.118869 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.118928 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.118939 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.118960 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.118971 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:46Z","lastTransitionTime":"2026-01-21T17:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.222182 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.222230 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.222240 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.222261 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.222273 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:46Z","lastTransitionTime":"2026-01-21T17:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.245628 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.245687 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.245628 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:46 crc kubenswrapper[4792]: E0121 17:57:46.245831 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.245976 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:46 crc kubenswrapper[4792]: E0121 17:57:46.246072 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:46 crc kubenswrapper[4792]: E0121 17:57:46.246174 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:46 crc kubenswrapper[4792]: E0121 17:57:46.246222 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.262214 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1cb5f50-d9b8-4e61-92ce-7b9ef5491779\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://462d3a0da8e3afaa0569ceef49de945142d2bb8fa425bec44c2f042f8b8dadea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://646eded1133458711bd3f2332e456ea14515c798cfeaf607b55693b21a5d7710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://403794c14c93f1b64a1b832fa1d8f7d45e8a226e95dca012e258782951933d90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.279714 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 05:05:28.381597798 +0000 UTC Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.280766 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.293497 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ckvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"145eca9e-18bd-4006-9768-30bfcc753c06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://24e65e0cf1c9db0b99471fb76155339d2a5513a17825886914b9df9987550bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rtq5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ckvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.311917 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4a2d61b-a0b0-4073-bd76-665e9fa19250\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a32de4f393c30583281a4431630dbe23e1420386fbdeed8737595b78d464845\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36f12319da77e736cc4fd4803c0186c2821e25764a9b8bbb41e137b95b6251f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fnvmp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:41Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vrcps\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.325286 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.325340 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.325353 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.325375 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.325388 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:46Z","lastTransitionTime":"2026-01-21T17:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.328544 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97b1a1f0-3533-44d9-8c10-9feb31d988ea\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgmns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-ddsqn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.347463 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd97b948-4785-419e-b471-aac8172716f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a73d7382542136d91922475711138b5a59633f510c9beadd57bba84cf27db54e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41cdac15a17aeca1efb61e6fb234ab59861bb3b692cc028f5d305c23eaad7366\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98f09db41085faefd387a464b2e702916f6ee1850251810834bffc896ec4479e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1b47455f06c5a6fb55c0ba54c00c3113575c9aceec13f75c0b763b6a8677f06\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1b47455f06c5a6fb55c0ba54c00c3113575c9aceec13f75c0b763b6a8677f06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.365676 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.381903 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://678dabfb2c535e6011ce05274c284fe489067d4c3fde061f836f33eb38f49307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.400181 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4747b3f3d5810a9416d406221d950d19d24951437e1d0dcf62e697874ca8572f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.418041 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tvdgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"129c7cf1-6a9e-440a-8d4e-049c0652cf6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:57:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e20f1abd9dbc3d916becd3d87decba7d7ee0645031748385d7ab8eeb1b5ae74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:57:18Z\\\",\\\"message\\\":\\\"2026-01-21T17:56:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3a3e61e1-36be-4ea1-be4b-a687e86434bb\\\\n2026-01-21T17:56:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3a3e61e1-36be-4ea1-be4b-a687e86434bb to /host/opt/cni/bin/\\\\n2026-01-21T17:56:33Z [verbose] multus-daemon started\\\\n2026-01-21T17:56:33Z [verbose] Readiness Indicator file check\\\\n2026-01-21T17:57:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:57:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7p2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tvdgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:46Z is after 2025-08-24T17:21:41Z"
Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.428171 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.428224 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.428239 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.428260 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.428277 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:46Z","lastTransitionTime":"2026-01-21T17:57:46Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.443692 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b67b2cbc-d9b2-4770-a2e2-a8fbe6b6f9d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://625c36072438e9bf7fbb8d5a230b8d50391c1adb285ad7a9f02e9861009c11e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d97517244e2065db2badccf4ce74e89c94983f35c132f877031e314ae68ff050\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a29b10e4979532f83eb462274708882fb330970b5776018be2fdf74bca7d3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\"
:true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://447163e40d22bb80ba0d376364040262ef9317f3389918b6bcc6a2ba381ad43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984d732e4150a60f475067196110a8c3ddd2c85186cb0dd094bc66b942941b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2b7b3642bc9b355ee69c3790b520355e5025298ea15e00ba6f282facf2cf742\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":fal
se,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f6a6b5d4862c26e6361ab7b160823547d0e30ebb592257b0a91b2a86c500ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9ca4e4b07ef6766dba40bf1c4398491b22f86fa9d993726ccd001ecd04e93ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.457648 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f6f746df-ab9c-430c-980c-241744c37dec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.472876 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b5914fec82806418459a4e92933499289e5725d7c4418ca5523d3b4472459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.486627 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.502494 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d8728e15-00c6-4fa7-a79a-cee551b64c18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://429adc0e9566435190bc8405be5bf7b367cabf8c69b658e06292cb0ebb2594d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e737bb5dbdc6c986e3f16a524439b8350c71d4ad789d4b2974d29ad27095b51e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a689fefbe35b93ac92cd9b7db630a560cc96537afb4c65733956a0c2aeeb622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f72dd0780fbf412b9fa8e000c50c3631baff88661ff30ee3cf8ca96d56fac4a6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc2d4af7ec78a2cc668046ed13a3e2367828b741a5fe84a8b67f26cd6e1ba797\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75672f5d10cffdd04c496ecf52c6fd4260ef0da0957fa70e800722b2fd136f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0dce6125821e10f5ae8bda42425ce1264d61743c7ecef9bf2025a1a25a26ce7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fbmv4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6lc6z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.516592 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b7f70249-6f22-4823-87dd-89a44246ea51\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d9f88d458e735cbdddcc588fc99a287f622bc6910b99568b03ede890ba58de2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7a7c52189641ff112ef87110b27b8c28706567e55e5f2089d42c95fec647b2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a7c52189641ff112ef87110b27b8c28706567e55e5f2089d42c95fec647b2a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:06Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.528706 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jbz42" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2dc141da-a3db-4ef1-8e59-d0e1d5dee765\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04331656a09873a5faafd4c1b4ccb06c907817416771d7cbe857c8e1e063eac3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7zdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jbz42\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.530738 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.530797 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.530811 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.530829 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.530841 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:46Z","lastTransitionTime":"2026-01-21T17:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.542073 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"759f2e21-e44e-4049-b262-cb49448e22ab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29efd827542af0c23d196f5fd74536cd410c1d28be166b70832f6c585c9a2311\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sb6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5d6x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.563261 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10549a02-b482-4bc0-a770-65dbb57f340a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/va
r/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3e
f3243\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5754c1d5d813fa2c06641540f162fab4e392271c5f7e4ff6406221b0a107bea3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5754c1d5d813fa2c06641540f162fab4e392271c5f7e4ff6406221b0a107bea3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:57:30Z\\\",\\\"message\\\":\\\"Sending *v1.Node event handler 7 for removal\\\\nI0121 17:57:29.595243 6851 handler.go:208] Removed *v1.Node event handler 7\\\\nI0121 17:57:29.595308 6851 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0121 17:57:29.595349 6851 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0121 17:57:29.595384 6851 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0121 17:57:29.595332 6851 handler.go:208] Removed *v1.Node event handler 2\\\\nI0121 17:57:29.595449 6851 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0121 17:57:29.595460 6851 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0121 17:57:29.595473 6851 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0121 17:57:29.596030 6851 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0121 17:57:29.595519 6851 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0121 17:57:29.596033 6851 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0121 17:57:29.595988 6851 factory.go:656] Stopping watch factory\\\\nI0121 17:57:29.596179 6851 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0121 17:57:29.596239 6851 ovnkube.go:599] Stopped ovnkube\\\\nI0121 17:57:29.596279 6851 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0121 17:57:29.596375 6851 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:57:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-8t4xq_openshift-ovn-kubernetes(10549a02-b482-4bc0-a770-65dbb57f340a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:56:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:56:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rnmzx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:56:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-8t4xq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:57:46Z is after 2025-08-24T17:21:41Z" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.633218 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.633260 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.633271 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.633287 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.633298 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:46Z","lastTransitionTime":"2026-01-21T17:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.735113 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.735142 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.735153 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.735168 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.735178 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:46Z","lastTransitionTime":"2026-01-21T17:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.838614 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.838665 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.838676 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.838695 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.838708 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:46Z","lastTransitionTime":"2026-01-21T17:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.943413 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.943483 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.943493 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.943515 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:46 crc kubenswrapper[4792]: I0121 17:57:46.943527 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:46Z","lastTransitionTime":"2026-01-21T17:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.046657 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.046696 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.046708 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.046729 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.046742 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:47Z","lastTransitionTime":"2026-01-21T17:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.060468 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs\") pod \"network-metrics-daemon-ddsqn\" (UID: \"97b1a1f0-3533-44d9-8c10-9feb31d988ea\") " pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:47 crc kubenswrapper[4792]: E0121 17:57:47.060689 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:57:47 crc kubenswrapper[4792]: E0121 17:57:47.060760 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs podName:97b1a1f0-3533-44d9-8c10-9feb31d988ea nodeName:}" failed. No retries permitted until 2026-01-21 17:58:51.060741027 +0000 UTC m=+165.042704213 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs") pod "network-metrics-daemon-ddsqn" (UID: "97b1a1f0-3533-44d9-8c10-9feb31d988ea") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.149691 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.149765 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.149778 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.149795 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.149805 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:47Z","lastTransitionTime":"2026-01-21T17:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.253450 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.253500 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.253510 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.253530 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.253541 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:47Z","lastTransitionTime":"2026-01-21T17:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.280177 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 01:19:10.045220632 +0000 UTC Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.356889 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.356993 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.357008 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.357027 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.357044 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:47Z","lastTransitionTime":"2026-01-21T17:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.460095 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.460134 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.460144 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.460157 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.460167 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:47Z","lastTransitionTime":"2026-01-21T17:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.562473 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.562517 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.562529 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.562546 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.562555 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:47Z","lastTransitionTime":"2026-01-21T17:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.664870 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.664927 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.664939 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.664955 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.664967 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:47Z","lastTransitionTime":"2026-01-21T17:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.767841 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.767909 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.767919 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.767939 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.767956 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:47Z","lastTransitionTime":"2026-01-21T17:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.870220 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.870259 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.870267 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.870281 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.870290 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:47Z","lastTransitionTime":"2026-01-21T17:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.972906 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.972933 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.972941 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.972953 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:47 crc kubenswrapper[4792]: I0121 17:57:47.972963 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:47Z","lastTransitionTime":"2026-01-21T17:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.075532 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.075583 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.075593 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.075605 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.075614 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:48Z","lastTransitionTime":"2026-01-21T17:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.178325 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.178370 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.178380 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.178394 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.178406 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:48Z","lastTransitionTime":"2026-01-21T17:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.245740 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.245755 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.245760 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.245770 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:48 crc kubenswrapper[4792]: E0121 17:57:48.245983 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:48 crc kubenswrapper[4792]: E0121 17:57:48.246066 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:48 crc kubenswrapper[4792]: E0121 17:57:48.246157 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:48 crc kubenswrapper[4792]: E0121 17:57:48.246269 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.280391 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 23:58:33.469668747 +0000 UTC Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.282106 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.282164 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.282179 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.282206 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.282225 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:48Z","lastTransitionTime":"2026-01-21T17:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.385556 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.385619 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.385634 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.385660 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.385708 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:48Z","lastTransitionTime":"2026-01-21T17:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.489569 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.489620 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.489632 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.489650 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.489661 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:48Z","lastTransitionTime":"2026-01-21T17:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.593049 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.593095 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.593103 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.593120 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.593134 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:48Z","lastTransitionTime":"2026-01-21T17:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.695627 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.696123 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.696234 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.696367 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.696450 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:48Z","lastTransitionTime":"2026-01-21T17:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.799472 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.799926 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.799994 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.800069 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.800154 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:48Z","lastTransitionTime":"2026-01-21T17:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.904100 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.904146 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.904164 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.904183 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:48 crc kubenswrapper[4792]: I0121 17:57:48.904196 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:48Z","lastTransitionTime":"2026-01-21T17:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.007293 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.007353 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.007365 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.007384 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.007397 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:49Z","lastTransitionTime":"2026-01-21T17:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.110245 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.110604 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.110695 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.110782 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.110843 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:49Z","lastTransitionTime":"2026-01-21T17:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.212693 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.212944 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.213025 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.213099 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.213159 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:49Z","lastTransitionTime":"2026-01-21T17:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.281313 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 07:58:44.123207401 +0000 UTC Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.315347 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.315401 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.315418 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.315441 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.315456 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:49Z","lastTransitionTime":"2026-01-21T17:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.419354 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.419707 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.419818 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.419975 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.420072 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:49Z","lastTransitionTime":"2026-01-21T17:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.524418 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.524472 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.524486 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.524516 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.524528 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:49Z","lastTransitionTime":"2026-01-21T17:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.628278 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.628319 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.628329 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.628342 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.628353 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:49Z","lastTransitionTime":"2026-01-21T17:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.730808 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.730913 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.730925 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.730938 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.730947 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:49Z","lastTransitionTime":"2026-01-21T17:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.833281 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.833338 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.833347 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.833362 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.833370 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:49Z","lastTransitionTime":"2026-01-21T17:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.935767 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.935811 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.935822 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.935840 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:49 crc kubenswrapper[4792]: I0121 17:57:49.935874 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:49Z","lastTransitionTime":"2026-01-21T17:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.038649 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.038690 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.038702 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.038719 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.038732 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:50Z","lastTransitionTime":"2026-01-21T17:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.141369 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.141402 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.141410 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.141424 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.141433 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:50Z","lastTransitionTime":"2026-01-21T17:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.243737 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.243773 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.243782 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.243795 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.243803 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:50Z","lastTransitionTime":"2026-01-21T17:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.246017 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.246030 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.246063 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.246182 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:50 crc kubenswrapper[4792]: E0121 17:57:50.246262 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:50 crc kubenswrapper[4792]: E0121 17:57:50.246319 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:50 crc kubenswrapper[4792]: E0121 17:57:50.246375 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:50 crc kubenswrapper[4792]: E0121 17:57:50.246549 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.281643 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 10:36:58.935524808 +0000 UTC Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.346032 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.346078 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.346090 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.346109 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.346123 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:50Z","lastTransitionTime":"2026-01-21T17:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.448511 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.448545 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.448554 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.448567 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.448575 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:50Z","lastTransitionTime":"2026-01-21T17:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.551480 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.551559 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.551568 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.551583 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.551592 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:50Z","lastTransitionTime":"2026-01-21T17:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.653664 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.653700 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.653727 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.653742 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.653752 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:50Z","lastTransitionTime":"2026-01-21T17:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.756584 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.756894 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.756964 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.757061 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.757128 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:50Z","lastTransitionTime":"2026-01-21T17:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.859277 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.859320 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.859330 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.859345 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.859354 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:50Z","lastTransitionTime":"2026-01-21T17:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.962794 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.962886 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.962899 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.962920 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:50 crc kubenswrapper[4792]: I0121 17:57:50.962936 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:50Z","lastTransitionTime":"2026-01-21T17:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.065820 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.065880 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.065902 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.065917 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.065927 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:51Z","lastTransitionTime":"2026-01-21T17:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.168661 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.168713 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.168727 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.168745 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.168756 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:51Z","lastTransitionTime":"2026-01-21T17:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.271133 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.271174 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.271182 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.271195 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.271205 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:51Z","lastTransitionTime":"2026-01-21T17:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.281806 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 05:29:53.383937999 +0000 UTC Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.373820 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.374146 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.374155 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.374175 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.374192 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:51Z","lastTransitionTime":"2026-01-21T17:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.476352 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.476428 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.476443 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.476467 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.476484 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:51Z","lastTransitionTime":"2026-01-21T17:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.579573 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.579619 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.579630 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.579647 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.579659 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:51Z","lastTransitionTime":"2026-01-21T17:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.682068 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.682116 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.682129 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.682147 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.682159 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:51Z","lastTransitionTime":"2026-01-21T17:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.784828 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.784890 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.784899 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.784911 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.784920 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:51Z","lastTransitionTime":"2026-01-21T17:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.887971 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.888030 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.888042 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.888064 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.888078 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:51Z","lastTransitionTime":"2026-01-21T17:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.990688 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.990755 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.990768 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.990791 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:51 crc kubenswrapper[4792]: I0121 17:57:51.990803 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:51Z","lastTransitionTime":"2026-01-21T17:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.094260 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.094319 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.094331 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.094351 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.094367 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:52Z","lastTransitionTime":"2026-01-21T17:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.198154 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.198628 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.198705 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.198795 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.198934 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:52Z","lastTransitionTime":"2026-01-21T17:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.246789 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.246943 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.246873 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.246873 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:52 crc kubenswrapper[4792]: E0121 17:57:52.247068 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:52 crc kubenswrapper[4792]: E0121 17:57:52.247203 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:52 crc kubenswrapper[4792]: E0121 17:57:52.247303 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:52 crc kubenswrapper[4792]: E0121 17:57:52.247396 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.282913 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 13:41:49.740377386 +0000 UTC Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.303046 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.303113 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.303123 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.303144 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.303155 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:52Z","lastTransitionTime":"2026-01-21T17:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.444515 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.444580 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.444605 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.444627 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.444641 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:52Z","lastTransitionTime":"2026-01-21T17:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.547360 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.547414 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.547424 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.547452 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.547472 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:52Z","lastTransitionTime":"2026-01-21T17:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.650477 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.650728 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.650957 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.651124 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.651262 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:52Z","lastTransitionTime":"2026-01-21T17:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.754224 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.754251 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.754259 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.754272 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.754280 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:52Z","lastTransitionTime":"2026-01-21T17:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.857091 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.857144 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.857156 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.857177 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.857189 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:52Z","lastTransitionTime":"2026-01-21T17:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.960233 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.960270 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.960279 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.960293 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:52 crc kubenswrapper[4792]: I0121 17:57:52.960302 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:52Z","lastTransitionTime":"2026-01-21T17:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.066633 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.066923 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.067050 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.067144 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.067215 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:53Z","lastTransitionTime":"2026-01-21T17:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.170233 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.170528 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.170605 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.170702 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.170781 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:53Z","lastTransitionTime":"2026-01-21T17:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.273804 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.273930 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.273957 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.273992 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.274015 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:53Z","lastTransitionTime":"2026-01-21T17:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.283934 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 18:52:48.367153356 +0000 UTC Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.377082 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.377125 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.377136 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.377150 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.377160 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:53Z","lastTransitionTime":"2026-01-21T17:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.480531 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.480611 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.480631 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.480665 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.480684 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:53Z","lastTransitionTime":"2026-01-21T17:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.583977 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.584255 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.584340 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.584469 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.584556 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:53Z","lastTransitionTime":"2026-01-21T17:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.686723 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.687037 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.687127 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.687218 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.687297 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:53Z","lastTransitionTime":"2026-01-21T17:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.789269 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.789318 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.789330 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.789349 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.789368 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:53Z","lastTransitionTime":"2026-01-21T17:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.891456 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.891494 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.891504 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.891519 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.891528 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:53Z","lastTransitionTime":"2026-01-21T17:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.994285 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.994839 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.995109 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.995277 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:53 crc kubenswrapper[4792]: I0121 17:57:53.995500 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:53Z","lastTransitionTime":"2026-01-21T17:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.099458 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.099506 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.099522 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.099541 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.099552 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:54Z","lastTransitionTime":"2026-01-21T17:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.202881 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.202927 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.202944 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.202966 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.202984 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:54Z","lastTransitionTime":"2026-01-21T17:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.223210 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.223249 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.223260 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.223278 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.223290 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:57:54Z","lastTransitionTime":"2026-01-21T17:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.245751 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:54 crc kubenswrapper[4792]: E0121 17:57:54.246123 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.246372 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.246402 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:54 crc kubenswrapper[4792]: E0121 17:57:54.246698 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.246785 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:54 crc kubenswrapper[4792]: E0121 17:57:54.246990 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:54 crc kubenswrapper[4792]: E0121 17:57:54.247112 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.284120 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 05:09:27.625516579 +0000 UTC Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.284200 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.293785 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-477z9"] Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.294169 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-477z9" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.295523 4792 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.306217 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.306278 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.306736 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.306776 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.359304 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b483a376-6ed8-4537-80a4-b31131254fbd-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-477z9\" (UID: \"b483a376-6ed8-4537-80a4-b31131254fbd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-477z9" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.359370 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b483a376-6ed8-4537-80a4-b31131254fbd-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-477z9\" (UID: \"b483a376-6ed8-4537-80a4-b31131254fbd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-477z9" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.359391 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/b483a376-6ed8-4537-80a4-b31131254fbd-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-477z9\" (UID: \"b483a376-6ed8-4537-80a4-b31131254fbd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-477z9" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.359410 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/b483a376-6ed8-4537-80a4-b31131254fbd-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-477z9\" (UID: \"b483a376-6ed8-4537-80a4-b31131254fbd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-477z9" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.359437 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b483a376-6ed8-4537-80a4-b31131254fbd-service-ca\") pod \"cluster-version-operator-5c965bbfc6-477z9\" (UID: \"b483a376-6ed8-4537-80a4-b31131254fbd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-477z9" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.380016 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-6lc6z" podStartSLOduration=87.379994943 
podStartE2EDuration="1m27.379994943s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:57:54.347313435 +0000 UTC m=+108.329276641" watchObservedRunningTime="2026-01-21 17:57:54.379994943 +0000 UTC m=+108.361958129" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.380612 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=88.380605597 podStartE2EDuration="1m28.380605597s" podCreationTimestamp="2026-01-21 17:56:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:57:54.380410933 +0000 UTC m=+108.362374119" watchObservedRunningTime="2026-01-21 17:57:54.380605597 +0000 UTC m=+108.362568783" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.404230 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=90.404201501 podStartE2EDuration="1m30.404201501s" podCreationTimestamp="2026-01-21 17:56:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:57:54.403979025 +0000 UTC m=+108.385942221" watchObservedRunningTime="2026-01-21 17:57:54.404201501 +0000 UTC m=+108.386164687" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.460054 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b483a376-6ed8-4537-80a4-b31131254fbd-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-477z9\" (UID: \"b483a376-6ed8-4537-80a4-b31131254fbd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-477z9" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.460481 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b483a376-6ed8-4537-80a4-b31131254fbd-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-477z9\" (UID: \"b483a376-6ed8-4537-80a4-b31131254fbd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-477z9" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.460637 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/b483a376-6ed8-4537-80a4-b31131254fbd-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-477z9\" (UID: \"b483a376-6ed8-4537-80a4-b31131254fbd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-477z9" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.460753 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/b483a376-6ed8-4537-80a4-b31131254fbd-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-477z9\" (UID: \"b483a376-6ed8-4537-80a4-b31131254fbd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-477z9" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.460813 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/b483a376-6ed8-4537-80a4-b31131254fbd-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-477z9\" (UID: 
\"b483a376-6ed8-4537-80a4-b31131254fbd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-477z9" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.460904 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/b483a376-6ed8-4537-80a4-b31131254fbd-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-477z9\" (UID: \"b483a376-6ed8-4537-80a4-b31131254fbd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-477z9" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.461078 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b483a376-6ed8-4537-80a4-b31131254fbd-service-ca\") pod \"cluster-version-operator-5c965bbfc6-477z9\" (UID: \"b483a376-6ed8-4537-80a4-b31131254fbd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-477z9" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.462011 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b483a376-6ed8-4537-80a4-b31131254fbd-service-ca\") pod \"cluster-version-operator-5c965bbfc6-477z9\" (UID: \"b483a376-6ed8-4537-80a4-b31131254fbd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-477z9" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.476423 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b483a376-6ed8-4537-80a4-b31131254fbd-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-477z9\" (UID: \"b483a376-6ed8-4537-80a4-b31131254fbd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-477z9" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.483153 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b483a376-6ed8-4537-80a4-b31131254fbd-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-477z9\" (UID: \"b483a376-6ed8-4537-80a4-b31131254fbd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-477z9" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.494933 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=32.494897822 podStartE2EDuration="32.494897822s" podCreationTimestamp="2026-01-21 17:57:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:57:54.494789219 +0000 UTC m=+108.476752425" watchObservedRunningTime="2026-01-21 17:57:54.494897822 +0000 UTC m=+108.476861008" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.509950 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-jbz42" podStartSLOduration=88.509918784 podStartE2EDuration="1m28.509918784s" podCreationTimestamp="2026-01-21 17:56:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:57:54.509191535 +0000 UTC m=+108.491154721" watchObservedRunningTime="2026-01-21 17:57:54.509918784 +0000 UTC m=+108.491881970" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.524925 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podStartSLOduration=87.524899833 podStartE2EDuration="1m27.524899833s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:57:54.524807371 +0000 UTC m=+108.506770557" watchObservedRunningTime="2026-01-21 17:57:54.524899833 +0000 UTC m=+108.506863029" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.538049 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vrcps" podStartSLOduration=87.538002088 podStartE2EDuration="1m27.538002088s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:57:54.536603673 +0000 UTC m=+108.518566889" watchObservedRunningTime="2026-01-21 17:57:54.538002088 +0000 UTC m=+108.519965274" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.556320 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=87.556294269 podStartE2EDuration="1m27.556294269s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:57:54.555145131 +0000 UTC m=+108.537108337" watchObservedRunningTime="2026-01-21 17:57:54.556294269 +0000 UTC m=+108.538257455" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.600903 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-6ckvd" podStartSLOduration=88.600881031 podStartE2EDuration="1m28.600881031s" podCreationTimestamp="2026-01-21 17:56:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:57:54.58301738 +0000 UTC m=+108.564980576" watchObservedRunningTime="2026-01-21 17:57:54.600881031 +0000 UTC m=+108.582844217" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.613922 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-477z9" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.617983 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-tvdgr" podStartSLOduration=87.617956643 podStartE2EDuration="1m27.617956643s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:57:54.617215964 +0000 UTC m=+108.599179150" watchObservedRunningTime="2026-01-21 17:57:54.617956643 +0000 UTC m=+108.599919829" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.677795 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=52.677764112 podStartE2EDuration="52.677764112s" podCreationTimestamp="2026-01-21 17:57:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:57:54.65586763 +0000 UTC m=+108.637830816" watchObservedRunningTime="2026-01-21 17:57:54.677764112 +0000 UTC m=+108.659727298" Jan 21 17:57:54 crc kubenswrapper[4792]: I0121 17:57:54.927406 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-477z9" event={"ID":"b483a376-6ed8-4537-80a4-b31131254fbd","Type":"ContainerStarted","Data":"5dc86ba412b23eb41869e22660970dad7ba3b37dec12657263debb8452c5427d"} Jan 21 17:57:55 crc kubenswrapper[4792]: I0121 17:57:55.246661 4792 scope.go:117] "RemoveContainer" containerID="5754c1d5d813fa2c06641540f162fab4e392271c5f7e4ff6406221b0a107bea3" Jan 21 17:57:55 crc kubenswrapper[4792]: E0121 17:57:55.246869 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-8t4xq_openshift-ovn-kubernetes(10549a02-b482-4bc0-a770-65dbb57f340a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" Jan 21 17:57:55 crc kubenswrapper[4792]: I0121 17:57:55.932307 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-477z9" event={"ID":"b483a376-6ed8-4537-80a4-b31131254fbd","Type":"ContainerStarted","Data":"eef785cbaf60b3bbde99f6d80a2c90e1186e0108ca0f401aa43f8b36cdd30ab5"} Jan 21 17:57:55 crc kubenswrapper[4792]: I0121 17:57:55.951452 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-477z9" podStartSLOduration=88.951425446 podStartE2EDuration="1m28.951425446s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:57:55.94955193 +0000 UTC m=+109.931515116" watchObservedRunningTime="2026-01-21 17:57:55.951425446 +0000 UTC m=+109.933388632" Jan 21 17:57:56 crc kubenswrapper[4792]: I0121 17:57:56.246896 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:56 crc kubenswrapper[4792]: I0121 17:57:56.246908 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:56 crc kubenswrapper[4792]: I0121 17:57:56.246936 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:56 crc kubenswrapper[4792]: I0121 17:57:56.248592 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:56 crc kubenswrapper[4792]: E0121 17:57:56.248584 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:56 crc kubenswrapper[4792]: E0121 17:57:56.248696 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:56 crc kubenswrapper[4792]: E0121 17:57:56.248953 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:57:56 crc kubenswrapper[4792]: E0121 17:57:56.249231 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:58 crc kubenswrapper[4792]: I0121 17:57:58.246916 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:57:58 crc kubenswrapper[4792]: I0121 17:57:58.246976 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:57:58 crc kubenswrapper[4792]: I0121 17:57:58.247054 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:57:58 crc kubenswrapper[4792]: I0121 17:57:58.247072 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:57:58 crc kubenswrapper[4792]: E0121 17:57:58.247259 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:57:58 crc kubenswrapper[4792]: E0121 17:57:58.247625 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:57:58 crc kubenswrapper[4792]: E0121 17:57:58.247817 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:57:58 crc kubenswrapper[4792]: E0121 17:57:58.248024 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:58:00 crc kubenswrapper[4792]: I0121 17:58:00.245796 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:58:00 crc kubenswrapper[4792]: I0121 17:58:00.245925 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:58:00 crc kubenswrapper[4792]: I0121 17:58:00.246037 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:58:00 crc kubenswrapper[4792]: I0121 17:58:00.246110 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:58:00 crc kubenswrapper[4792]: E0121 17:58:00.246685 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:58:00 crc kubenswrapper[4792]: E0121 17:58:00.246767 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:58:00 crc kubenswrapper[4792]: E0121 17:58:00.246863 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:58:00 crc kubenswrapper[4792]: E0121 17:58:00.246862 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:58:02 crc kubenswrapper[4792]: I0121 17:58:02.246081 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:58:02 crc kubenswrapper[4792]: I0121 17:58:02.246093 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:58:02 crc kubenswrapper[4792]: I0121 17:58:02.246130 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:58:02 crc kubenswrapper[4792]: I0121 17:58:02.246144 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:58:02 crc kubenswrapper[4792]: E0121 17:58:02.246394 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:58:02 crc kubenswrapper[4792]: E0121 17:58:02.246474 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:58:02 crc kubenswrapper[4792]: E0121 17:58:02.247145 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:58:02 crc kubenswrapper[4792]: E0121 17:58:02.247356 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:58:04 crc kubenswrapper[4792]: I0121 17:58:04.246037 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:58:04 crc kubenswrapper[4792]: I0121 17:58:04.246041 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:58:04 crc kubenswrapper[4792]: E0121 17:58:04.246435 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:58:04 crc kubenswrapper[4792]: I0121 17:58:04.246498 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:58:04 crc kubenswrapper[4792]: I0121 17:58:04.246588 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:58:04 crc kubenswrapper[4792]: E0121 17:58:04.246807 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:58:04 crc kubenswrapper[4792]: E0121 17:58:04.247010 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:58:04 crc kubenswrapper[4792]: E0121 17:58:04.247429 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:58:04 crc kubenswrapper[4792]: I0121 17:58:04.970617 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tvdgr_129c7cf1-6a9e-440a-8d4e-049c0652cf6e/kube-multus/1.log" Jan 21 17:58:04 crc kubenswrapper[4792]: I0121 17:58:04.971941 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tvdgr_129c7cf1-6a9e-440a-8d4e-049c0652cf6e/kube-multus/0.log" Jan 21 17:58:04 crc kubenswrapper[4792]: I0121 17:58:04.972233 4792 generic.go:334] "Generic (PLEG): container finished" podID="129c7cf1-6a9e-440a-8d4e-049c0652cf6e" containerID="8e20f1abd9dbc3d916becd3d87decba7d7ee0645031748385d7ab8eeb1b5ae74" exitCode=1 Jan 21 17:58:04 crc kubenswrapper[4792]: I0121 17:58:04.972352 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-tvdgr" event={"ID":"129c7cf1-6a9e-440a-8d4e-049c0652cf6e","Type":"ContainerDied","Data":"8e20f1abd9dbc3d916becd3d87decba7d7ee0645031748385d7ab8eeb1b5ae74"} Jan 21 17:58:04 crc kubenswrapper[4792]: I0121 17:58:04.972669 4792 scope.go:117] "RemoveContainer" containerID="89e0ec903a05872fdbaba120ac297c26a53461201bb9597a87e66f5c7dab35fc" Jan 21 17:58:04 crc kubenswrapper[4792]: I0121 17:58:04.973557 4792 scope.go:117] "RemoveContainer" containerID="8e20f1abd9dbc3d916becd3d87decba7d7ee0645031748385d7ab8eeb1b5ae74" Jan 21 17:58:04 crc kubenswrapper[4792]: E0121 17:58:04.973941 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-tvdgr_openshift-multus(129c7cf1-6a9e-440a-8d4e-049c0652cf6e)\"" pod="openshift-multus/multus-tvdgr" podUID="129c7cf1-6a9e-440a-8d4e-049c0652cf6e" Jan 21 17:58:05 crc kubenswrapper[4792]: I0121 17:58:05.978327 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tvdgr_129c7cf1-6a9e-440a-8d4e-049c0652cf6e/kube-multus/1.log" Jan 21 17:58:06 crc kubenswrapper[4792]: E0121 17:58:06.207501 4792 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 21 17:58:06 crc kubenswrapper[4792]: I0121 17:58:06.246312 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:58:06 crc kubenswrapper[4792]: I0121 17:58:06.246376 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:58:06 crc kubenswrapper[4792]: I0121 17:58:06.246483 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:58:06 crc kubenswrapper[4792]: I0121 17:58:06.247561 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:58:06 crc kubenswrapper[4792]: E0121 17:58:06.247764 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:58:06 crc kubenswrapper[4792]: E0121 17:58:06.247677 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:58:06 crc kubenswrapper[4792]: E0121 17:58:06.247548 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:58:06 crc kubenswrapper[4792]: E0121 17:58:06.248139 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:58:06 crc kubenswrapper[4792]: E0121 17:58:06.347947 4792 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 21 17:58:08 crc kubenswrapper[4792]: I0121 17:58:08.247099 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:58:08 crc kubenswrapper[4792]: I0121 17:58:08.247181 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:58:08 crc kubenswrapper[4792]: I0121 17:58:08.247264 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:58:08 crc kubenswrapper[4792]: I0121 17:58:08.247211 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:58:08 crc kubenswrapper[4792]: E0121 17:58:08.247248 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:58:08 crc kubenswrapper[4792]: E0121 17:58:08.247421 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:58:08 crc kubenswrapper[4792]: E0121 17:58:08.247533 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:58:08 crc kubenswrapper[4792]: E0121 17:58:08.247625 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:58:09 crc kubenswrapper[4792]: I0121 17:58:09.246613 4792 scope.go:117] "RemoveContainer" containerID="5754c1d5d813fa2c06641540f162fab4e392271c5f7e4ff6406221b0a107bea3" Jan 21 17:58:09 crc kubenswrapper[4792]: E0121 17:58:09.247218 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-8t4xq_openshift-ovn-kubernetes(10549a02-b482-4bc0-a770-65dbb57f340a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" Jan 21 17:58:10 crc kubenswrapper[4792]: I0121 17:58:10.246894 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:58:10 crc kubenswrapper[4792]: I0121 17:58:10.246894 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:58:10 crc kubenswrapper[4792]: I0121 17:58:10.246946 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:58:10 crc kubenswrapper[4792]: E0121 17:58:10.247149 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:58:10 crc kubenswrapper[4792]: I0121 17:58:10.247176 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:58:10 crc kubenswrapper[4792]: E0121 17:58:10.247291 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:58:10 crc kubenswrapper[4792]: E0121 17:58:10.247434 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:58:10 crc kubenswrapper[4792]: E0121 17:58:10.247573 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:58:11 crc kubenswrapper[4792]: E0121 17:58:11.349379 4792 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 21 17:58:12 crc kubenswrapper[4792]: I0121 17:58:12.245729 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:58:12 crc kubenswrapper[4792]: I0121 17:58:12.245784 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:58:12 crc kubenswrapper[4792]: I0121 17:58:12.245808 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:58:12 crc kubenswrapper[4792]: E0121 17:58:12.245879 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:58:12 crc kubenswrapper[4792]: E0121 17:58:12.245959 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:58:12 crc kubenswrapper[4792]: E0121 17:58:12.246041 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:58:12 crc kubenswrapper[4792]: I0121 17:58:12.246090 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:58:12 crc kubenswrapper[4792]: E0121 17:58:12.246139 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:58:14 crc kubenswrapper[4792]: I0121 17:58:14.246493 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:58:14 crc kubenswrapper[4792]: I0121 17:58:14.246554 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:58:14 crc kubenswrapper[4792]: E0121 17:58:14.246657 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:58:14 crc kubenswrapper[4792]: I0121 17:58:14.246587 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:58:14 crc kubenswrapper[4792]: E0121 17:58:14.246758 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:58:14 crc kubenswrapper[4792]: I0121 17:58:14.246711 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:58:14 crc kubenswrapper[4792]: E0121 17:58:14.246904 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:58:14 crc kubenswrapper[4792]: E0121 17:58:14.247048 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:58:16 crc kubenswrapper[4792]: I0121 17:58:16.246079 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:58:16 crc kubenswrapper[4792]: I0121 17:58:16.246097 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:58:16 crc kubenswrapper[4792]: I0121 17:58:16.246143 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:58:16 crc kubenswrapper[4792]: I0121 17:58:16.246142 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:58:16 crc kubenswrapper[4792]: E0121 17:58:16.247460 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:58:16 crc kubenswrapper[4792]: E0121 17:58:16.247529 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:58:16 crc kubenswrapper[4792]: E0121 17:58:16.247602 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:58:16 crc kubenswrapper[4792]: E0121 17:58:16.247683 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:58:16 crc kubenswrapper[4792]: E0121 17:58:16.349968 4792 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 21 17:58:18 crc kubenswrapper[4792]: I0121 17:58:18.246508 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:58:18 crc kubenswrapper[4792]: I0121 17:58:18.246565 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:58:18 crc kubenswrapper[4792]: E0121 17:58:18.247215 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:58:18 crc kubenswrapper[4792]: I0121 17:58:18.246667 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:58:18 crc kubenswrapper[4792]: I0121 17:58:18.246683 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:58:18 crc kubenswrapper[4792]: E0121 17:58:18.247252 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:58:18 crc kubenswrapper[4792]: E0121 17:58:18.247387 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:58:18 crc kubenswrapper[4792]: I0121 17:58:18.247029 4792 scope.go:117] "RemoveContainer" containerID="8e20f1abd9dbc3d916becd3d87decba7d7ee0645031748385d7ab8eeb1b5ae74" Jan 21 17:58:18 crc kubenswrapper[4792]: E0121 17:58:18.247563 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:58:19 crc kubenswrapper[4792]: I0121 17:58:19.025688 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tvdgr_129c7cf1-6a9e-440a-8d4e-049c0652cf6e/kube-multus/1.log" Jan 21 17:58:19 crc kubenswrapper[4792]: I0121 17:58:19.025771 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-tvdgr" event={"ID":"129c7cf1-6a9e-440a-8d4e-049c0652cf6e","Type":"ContainerStarted","Data":"da9feb3c04286ecb6efb1df895e99a5d49f616f2aff0c41da9f572606de171ae"} Jan 21 17:58:20 crc kubenswrapper[4792]: I0121 17:58:20.249894 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:58:20 crc kubenswrapper[4792]: E0121 17:58:20.250147 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:58:20 crc kubenswrapper[4792]: I0121 17:58:20.250479 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:58:20 crc kubenswrapper[4792]: E0121 17:58:20.250575 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:58:20 crc kubenswrapper[4792]: I0121 17:58:20.250769 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:58:20 crc kubenswrapper[4792]: E0121 17:58:20.250883 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:58:20 crc kubenswrapper[4792]: I0121 17:58:20.251973 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:58:20 crc kubenswrapper[4792]: E0121 17:58:20.252076 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:58:20 crc kubenswrapper[4792]: I0121 17:58:20.252887 4792 scope.go:117] "RemoveContainer" containerID="5754c1d5d813fa2c06641540f162fab4e392271c5f7e4ff6406221b0a107bea3" Jan 21 17:58:21 crc kubenswrapper[4792]: I0121 17:58:21.034662 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8t4xq_10549a02-b482-4bc0-a770-65dbb57f340a/ovnkube-controller/3.log" Jan 21 17:58:21 crc kubenswrapper[4792]: I0121 17:58:21.036285 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerStarted","Data":"737adb699317b3944466c13490578c30fb1889fc1dc4298b2cf8ae612c8c94c9"} Jan 21 17:58:21 crc kubenswrapper[4792]: I0121 17:58:21.037174 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" Jan 21 17:58:21 crc kubenswrapper[4792]: I0121 17:58:21.048312 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-ddsqn"] Jan 21 17:58:21 crc kubenswrapper[4792]: I0121 17:58:21.048684 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:58:21 crc kubenswrapper[4792]: E0121 17:58:21.048761 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:58:21 crc kubenswrapper[4792]: I0121 17:58:21.068126 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" podStartSLOduration=114.068104099 podStartE2EDuration="1m54.068104099s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:21.067597727 +0000 UTC m=+135.049560923" watchObservedRunningTime="2026-01-21 17:58:21.068104099 +0000 UTC m=+135.050067285" Jan 21 17:58:21 crc kubenswrapper[4792]: E0121 17:58:21.351823 4792 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 21 17:58:22 crc kubenswrapper[4792]: I0121 17:58:22.246438 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:58:22 crc kubenswrapper[4792]: E0121 17:58:22.246601 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:58:22 crc kubenswrapper[4792]: I0121 17:58:22.246669 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:58:22 crc kubenswrapper[4792]: I0121 17:58:22.246701 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:58:22 crc kubenswrapper[4792]: E0121 17:58:22.246800 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:58:22 crc kubenswrapper[4792]: I0121 17:58:22.246676 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:58:22 crc kubenswrapper[4792]: E0121 17:58:22.246876 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:58:22 crc kubenswrapper[4792]: E0121 17:58:22.246953 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:58:24 crc kubenswrapper[4792]: I0121 17:58:24.246300 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:58:24 crc kubenswrapper[4792]: I0121 17:58:24.246348 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:58:24 crc kubenswrapper[4792]: I0121 17:58:24.246355 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:58:24 crc kubenswrapper[4792]: E0121 17:58:24.246447 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:58:24 crc kubenswrapper[4792]: I0121 17:58:24.246461 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:58:24 crc kubenswrapper[4792]: E0121 17:58:24.246511 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:58:24 crc kubenswrapper[4792]: E0121 17:58:24.246910 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:58:24 crc kubenswrapper[4792]: E0121 17:58:24.247012 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:58:26 crc kubenswrapper[4792]: I0121 17:58:26.246551 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:58:26 crc kubenswrapper[4792]: I0121 17:58:26.246618 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:58:26 crc kubenswrapper[4792]: I0121 17:58:26.247869 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:58:26 crc kubenswrapper[4792]: I0121 17:58:26.247890 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:58:26 crc kubenswrapper[4792]: E0121 17:58:26.247964 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:58:26 crc kubenswrapper[4792]: E0121 17:58:26.248019 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ddsqn" podUID="97b1a1f0-3533-44d9-8c10-9feb31d988ea" Jan 21 17:58:26 crc kubenswrapper[4792]: E0121 17:58:26.248094 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:58:26 crc kubenswrapper[4792]: E0121 17:58:26.248176 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:58:28 crc kubenswrapper[4792]: I0121 17:58:28.246344 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn" Jan 21 17:58:28 crc kubenswrapper[4792]: I0121 17:58:28.246451 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:58:28 crc kubenswrapper[4792]: I0121 17:58:28.246357 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:58:28 crc kubenswrapper[4792]: I0121 17:58:28.246357 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:58:28 crc kubenswrapper[4792]: I0121 17:58:28.249024 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 21 17:58:28 crc kubenswrapper[4792]: I0121 17:58:28.249218 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 21 17:58:28 crc kubenswrapper[4792]: I0121 17:58:28.249384 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 21 17:58:28 crc kubenswrapper[4792]: I0121 17:58:28.249479 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 21 17:58:28 crc kubenswrapper[4792]: I0121 17:58:28.250395 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 21 17:58:28 crc kubenswrapper[4792]: I0121 17:58:28.251627 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 21 17:58:29 crc kubenswrapper[4792]: I0121 17:58:29.076107 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" Jan 21 17:58:33 crc kubenswrapper[4792]: I0121 17:58:33.335048 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:33 crc kubenswrapper[4792]: I0121 17:58:33.335179 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:58:33 crc kubenswrapper[4792]: I0121 17:58:33.335206 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:58:33 crc kubenswrapper[4792]: E0121 17:58:33.335296 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 18:00:35.335260732 +0000 UTC m=+269.317223928 (durationBeforeRetry 2m2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:33 crc kubenswrapper[4792]: I0121 17:58:33.335939 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:58:33 crc kubenswrapper[4792]: I0121 17:58:33.343689 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:58:33 crc kubenswrapper[4792]: I0121 17:58:33.368943 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:58:33 crc kubenswrapper[4792]: I0121 17:58:33.435997 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:58:33 crc kubenswrapper[4792]: I0121 17:58:33.436046 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:58:33 crc kubenswrapper[4792]: I0121 17:58:33.440062 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:58:33 crc kubenswrapper[4792]: I0121 17:58:33.440775 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:58:33 crc kubenswrapper[4792]: I0121 17:58:33.677151 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:58:33 crc kubenswrapper[4792]: I0121 17:58:33.684355 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.096430 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"7ba7278ec5ff00f25aa0f241e26b1cdb65fcb5e0de7ca822dd0ecf311e829247"} Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.096656 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"d14321181dac2ba0c00f8b4f74e4d55f89a8caaa28b359763a323526637f2b7f"} Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.099211 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"d8d0ceb3b006e3c618004ee915c4965570204192c713a2ff4b511efc8da6547a"} Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.099246 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"c40b57e42c819343e1371f20afa8f0f74de0a284a8963531799af6f93f652084"} Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.100655 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"9d2ebbb2cac28d68f3bdec09e786b38e8e624ff4be30e3c274f15380fdace058"} Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.100765 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"2c43a542792be02683a77ee2379accb917ce3bf47daffee62156e306b8da4fae"} Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.303417 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.340701 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dz4cq"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.341564 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.342129 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-dvxl6"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.342837 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.342890 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dvxl6" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.343555 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.343561 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-lctf8"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.344041 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lctf8" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.344252 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.352374 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.352415 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.352433 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.352691 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.353021 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.353102 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.353226 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.353267 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.353478 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.353593 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.357302 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.357501 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.357684 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.357308 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.364556 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: 
I0121 17:58:35.365572 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.365606 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.366041 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.366069 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.366392 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.366654 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-w9bvf"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.366928 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.367405 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-w9bvf" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.367955 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-zhchl"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.368578 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-zhchl" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.368738 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-vs6k9"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.369359 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-vs6k9" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.369692 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-cvxjv"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.370631 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-cvxjv" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.373768 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.376116 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.376332 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.376530 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.376556 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.376837 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.376878 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.377145 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.377299 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.377714 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.377825 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.377927 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.378262 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.378310 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.378449 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.378574 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.378661 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.378272 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.378799 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 21 
17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.380355 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.380709 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-q7mdj"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.381356 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.385212 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.387264 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.387476 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.387481 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.387995 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.388021 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.388139 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.388152 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.388324 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.388466 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.388810 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.392997 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-kzpjg"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.393546 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.393636 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-kzpjg" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.398075 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.402919 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.403779 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.434064 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-22hnh"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.434721 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22hnh" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.434737 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.435182 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.435310 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.435322 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.435380 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.435485 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.435670 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-7v9hk"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.435883 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.436277 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7v9hk" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.437740 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h99r2"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.438135 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h99r2" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.441330 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.441601 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.441777 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.441900 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.442054 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.442127 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.442453 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.442602 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.443270 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.443540 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.443657 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.445916 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r49jw"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.446006 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.446445 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8xdnr"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.446782 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8xdnr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.447204 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r49jw" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.448334 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.448821 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.449216 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.449434 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.449078 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.449647 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.449665 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-mrvfm"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.450429 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-mrvfm" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.452023 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.452253 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5jx5t"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.452837 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5jx5t" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.458000 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xscbt"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.458472 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cb575284-6b4b-49ab-b314-ba85d494ef6c-images\") pod \"machine-api-operator-5694c8668f-zhchl\" (UID: \"cb575284-6b4b-49ab-b314-ba85d494ef6c\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zhchl" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.458631 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/36a8e839-34d3-48ad-908e-4bcc8c996cca-machine-approver-tls\") pod \"machine-approver-56656f9798-dvxl6\" (UID: \"36a8e839-34d3-48ad-908e-4bcc8c996cca\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dvxl6" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.458734 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wf55v\" (UniqueName: \"kubernetes.io/projected/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-kube-api-access-wf55v\") pod \"controller-manager-879f6c89f-dz4cq\" (UID: \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.458907 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39822312-6707-4de1-8cc6-5ab1f513ebf7-serving-cert\") pod \"route-controller-manager-6576b87f9c-42ml5\" (UID: \"39822312-6707-4de1-8cc6-5ab1f513ebf7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.459011 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-serving-cert\") pod \"controller-manager-879f6c89f-dz4cq\" (UID: \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.459105 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-client-ca\") pod \"controller-manager-879f6c89f-dz4cq\" (UID: \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.459201 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-dz4cq\" (UID: \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.459290 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" 
(UniqueName: \"kubernetes.io/configmap/5ba25763-8b03-46c7-bc29-e401dd42266c-etcd-service-ca\") pod \"etcd-operator-b45778765-vs6k9\" (UID: \"5ba25763-8b03-46c7-bc29-e401dd42266c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vs6k9" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.459499 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb575284-6b4b-49ab-b314-ba85d494ef6c-config\") pod \"machine-api-operator-5694c8668f-zhchl\" (UID: \"cb575284-6b4b-49ab-b314-ba85d494ef6c\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zhchl" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.459594 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5ba25763-8b03-46c7-bc29-e401dd42266c-etcd-client\") pod \"etcd-operator-b45778765-vs6k9\" (UID: \"5ba25763-8b03-46c7-bc29-e401dd42266c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vs6k9" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.458504 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.458593 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.458661 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.458659 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.458676 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.459792 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/39822312-6707-4de1-8cc6-5ab1f513ebf7-client-ca\") pod \"route-controller-manager-6576b87f9c-42ml5\" (UID: \"39822312-6707-4de1-8cc6-5ab1f513ebf7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.458712 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.458716 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.458723 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.458746 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.458755 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.458858 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.458894 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.458926 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.459192 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.459665 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.460476 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnn6q\" (UniqueName: \"kubernetes.io/projected/36a8e839-34d3-48ad-908e-4bcc8c996cca-kube-api-access-pnn6q\") pod \"machine-approver-56656f9798-dvxl6\" (UID: \"36a8e839-34d3-48ad-908e-4bcc8c996cca\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dvxl6" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.460594 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khdpt\" (UniqueName: \"kubernetes.io/projected/209b3283-9e92-472c-bc99-10d9d56788b2-kube-api-access-khdpt\") pod \"openshift-apiserver-operator-796bbdcf4f-w9bvf\" (UID: \"209b3283-9e92-472c-bc99-10d9d56788b2\") " 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-w9bvf" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.460703 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/940eecee-282d-4cf5-b296-0a9b63ea3d95-available-featuregates\") pod \"openshift-config-operator-7777fb866f-lctf8\" (UID: \"940eecee-282d-4cf5-b296-0a9b63ea3d95\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lctf8" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.460794 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ba25763-8b03-46c7-bc29-e401dd42266c-config\") pod \"etcd-operator-b45778765-vs6k9\" (UID: \"5ba25763-8b03-46c7-bc29-e401dd42266c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vs6k9" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.460903 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/209b3283-9e92-472c-bc99-10d9d56788b2-config\") pod \"openshift-apiserver-operator-796bbdcf4f-w9bvf\" (UID: \"209b3283-9e92-472c-bc99-10d9d56788b2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-w9bvf" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.460999 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/387582b8-e641-4519-a20f-794c23707b51-metrics-tls\") pod \"dns-operator-744455d44c-cvxjv\" (UID: \"387582b8-e641-4519-a20f-794c23707b51\") " pod="openshift-dns-operator/dns-operator-744455d44c-cvxjv" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.461087 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36a8e839-34d3-48ad-908e-4bcc8c996cca-config\") pod \"machine-approver-56656f9798-dvxl6\" (UID: \"36a8e839-34d3-48ad-908e-4bcc8c996cca\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dvxl6" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.461207 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/209b3283-9e92-472c-bc99-10d9d56788b2-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-w9bvf\" (UID: \"209b3283-9e92-472c-bc99-10d9d56788b2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-w9bvf" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.461300 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/5ba25763-8b03-46c7-bc29-e401dd42266c-etcd-ca\") pod \"etcd-operator-b45778765-vs6k9\" (UID: \"5ba25763-8b03-46c7-bc29-e401dd42266c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vs6k9" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.461400 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckwfz\" (UniqueName: \"kubernetes.io/projected/39822312-6707-4de1-8cc6-5ab1f513ebf7-kube-api-access-ckwfz\") pod \"route-controller-manager-6576b87f9c-42ml5\" (UID: \"39822312-6707-4de1-8cc6-5ab1f513ebf7\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.461484 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ba25763-8b03-46c7-bc29-e401dd42266c-serving-cert\") pod \"etcd-operator-b45778765-vs6k9\" (UID: \"5ba25763-8b03-46c7-bc29-e401dd42266c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vs6k9" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.461566 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9pck\" (UniqueName: \"kubernetes.io/projected/5ba25763-8b03-46c7-bc29-e401dd42266c-kube-api-access-k9pck\") pod \"etcd-operator-b45778765-vs6k9\" (UID: \"5ba25763-8b03-46c7-bc29-e401dd42266c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vs6k9" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.461649 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39822312-6707-4de1-8cc6-5ab1f513ebf7-config\") pod \"route-controller-manager-6576b87f9c-42ml5\" (UID: \"39822312-6707-4de1-8cc6-5ab1f513ebf7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.461733 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/36a8e839-34d3-48ad-908e-4bcc8c996cca-auth-proxy-config\") pod \"machine-approver-56656f9798-dvxl6\" (UID: \"36a8e839-34d3-48ad-908e-4bcc8c996cca\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dvxl6" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.461830 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/cb575284-6b4b-49ab-b314-ba85d494ef6c-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-zhchl\" (UID: \"cb575284-6b4b-49ab-b314-ba85d494ef6c\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zhchl" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.461946 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-config\") pod \"controller-manager-879f6c89f-dz4cq\" (UID: \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.462027 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zsqj\" (UniqueName: \"kubernetes.io/projected/940eecee-282d-4cf5-b296-0a9b63ea3d95-kube-api-access-5zsqj\") pod \"openshift-config-operator-7777fb866f-lctf8\" (UID: \"940eecee-282d-4cf5-b296-0a9b63ea3d95\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lctf8" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.462115 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8q2kk\" (UniqueName: \"kubernetes.io/projected/387582b8-e641-4519-a20f-794c23707b51-kube-api-access-8q2kk\") pod \"dns-operator-744455d44c-cvxjv\" (UID: \"387582b8-e641-4519-a20f-794c23707b51\") " 
pod="openshift-dns-operator/dns-operator-744455d44c-cvxjv" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.462207 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/940eecee-282d-4cf5-b296-0a9b63ea3d95-serving-cert\") pod \"openshift-config-operator-7777fb866f-lctf8\" (UID: \"940eecee-282d-4cf5-b296-0a9b63ea3d95\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lctf8" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.462291 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-br4rq\" (UniqueName: \"kubernetes.io/projected/cb575284-6b4b-49ab-b314-ba85d494ef6c-kube-api-access-br4rq\") pod \"machine-api-operator-5694c8668f-zhchl\" (UID: \"cb575284-6b4b-49ab-b314-ba85d494ef6c\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zhchl" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.464370 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.464764 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.464940 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.469840 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-jt4rr"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.470464 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g2vzr"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.470756 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483625-rljlr"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.470994 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-jt4rr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.471100 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-rljlr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.471549 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g2vzr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.471631 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-xpn4x"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.476131 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-lctf8"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.476172 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.476242 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-xpn4x" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.477104 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.477549 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.478106 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.478350 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.500574 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.504165 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4nscl"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.506918 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.527277 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.527819 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4nscl" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.527889 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-d869q"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.539363 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-srb5s"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.539790 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7w8k4"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.540122 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d869q" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.540169 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f659j"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.540234 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-srb5s" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.540251 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7w8k4" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.541228 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f659j" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.542655 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-p8ffl"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.543271 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-p8ffl" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.543379 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-65vgm"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.543969 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.545342 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.545478 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-p5dgb"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.546493 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-p5dgb" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.546987 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-bxqhq"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.547457 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.549024 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nm5x9"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.550571 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nm5x9" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.551238 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-r2mgl"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.552450 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-r2mgl" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.554521 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ft2zn"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.556263 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bdt7b"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.556980 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-qq7s6"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.557389 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dz4cq"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.557550 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-qq7s6" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.557886 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ft2zn" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.558121 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.558541 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-w9bvf"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.559954 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2zfjc"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.561253 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-cvxjv"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.561411 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2zfjc" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.563446 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e668a4f5-eb70-407c-91de-67acaaa1e03a-config\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.563613 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e668a4f5-eb70-407c-91de-67acaaa1e03a-etcd-client\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.563747 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/0221af58-b758-43bd-8c44-5af77ab00967-signing-key\") pod \"service-ca-9c57cc56f-jt4rr\" (UID: \"0221af58-b758-43bd-8c44-5af77ab00967\") " pod="openshift-service-ca/service-ca-9c57cc56f-jt4rr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.563945 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/cb575284-6b4b-49ab-b314-ba85d494ef6c-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-zhchl\" (UID: \"cb575284-6b4b-49ab-b314-ba85d494ef6c\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zhchl" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.564081 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-config\") pod \"controller-manager-879f6c89f-dz4cq\" (UID: \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.564208 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/26aa0d14-edd4-44c6-84d8-86b8361adfbd-serving-cert\") pod \"console-operator-58897d9998-kzpjg\" (UID: \"26aa0d14-edd4-44c6-84d8-86b8361adfbd\") " pod="openshift-console-operator/console-operator-58897d9998-kzpjg" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.564354 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8q2kk\" (UniqueName: \"kubernetes.io/projected/387582b8-e641-4519-a20f-794c23707b51-kube-api-access-8q2kk\") pod \"dns-operator-744455d44c-cvxjv\" (UID: \"387582b8-e641-4519-a20f-794c23707b51\") " pod="openshift-dns-operator/dns-operator-744455d44c-cvxjv" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.564473 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/940eecee-282d-4cf5-b296-0a9b63ea3d95-serving-cert\") pod \"openshift-config-operator-7777fb866f-lctf8\" (UID: \"940eecee-282d-4cf5-b296-0a9b63ea3d95\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lctf8" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.564588 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zsqj\" (UniqueName: \"kubernetes.io/projected/940eecee-282d-4cf5-b296-0a9b63ea3d95-kube-api-access-5zsqj\") pod \"openshift-config-operator-7777fb866f-lctf8\" (UID: \"940eecee-282d-4cf5-b296-0a9b63ea3d95\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lctf8" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.564699 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rshnk\" (UniqueName: \"kubernetes.io/projected/e668a4f5-eb70-407c-91de-67acaaa1e03a-kube-api-access-rshnk\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.563467 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-vs6k9"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.564805 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c181f61f-a9d3-4d0e-84d2-aa2bd560017b-secret-volume\") pod \"collect-profiles-29483625-rljlr\" (UID: \"c181f61f-a9d3-4d0e-84d2-aa2bd560017b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-rljlr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.564937 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-br4rq\" (UniqueName: \"kubernetes.io/projected/cb575284-6b4b-49ab-b314-ba85d494ef6c-kube-api-access-br4rq\") pod \"machine-api-operator-5694c8668f-zhchl\" (UID: \"cb575284-6b4b-49ab-b314-ba85d494ef6c\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zhchl" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.564972 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e668a4f5-eb70-407c-91de-67acaaa1e03a-serving-cert\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.565004 4792 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e668a4f5-eb70-407c-91de-67acaaa1e03a-encryption-config\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.565034 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c181f61f-a9d3-4d0e-84d2-aa2bd560017b-config-volume\") pod \"collect-profiles-29483625-rljlr\" (UID: \"c181f61f-a9d3-4d0e-84d2-aa2bd560017b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-rljlr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.565060 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cb575284-6b4b-49ab-b314-ba85d494ef6c-images\") pod \"machine-api-operator-5694c8668f-zhchl\" (UID: \"cb575284-6b4b-49ab-b314-ba85d494ef6c\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zhchl" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.565084 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/36a8e839-34d3-48ad-908e-4bcc8c996cca-machine-approver-tls\") pod \"machine-approver-56656f9798-dvxl6\" (UID: \"36a8e839-34d3-48ad-908e-4bcc8c996cca\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dvxl6" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.565108 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frfhs\" (UniqueName: \"kubernetes.io/projected/c181f61f-a9d3-4d0e-84d2-aa2bd560017b-kube-api-access-frfhs\") pod \"collect-profiles-29483625-rljlr\" (UID: \"c181f61f-a9d3-4d0e-84d2-aa2bd560017b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-rljlr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.565131 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tm6p8\" (UniqueName: \"kubernetes.io/projected/26aa0d14-edd4-44c6-84d8-86b8361adfbd-kube-api-access-tm6p8\") pod \"console-operator-58897d9998-kzpjg\" (UID: \"26aa0d14-edd4-44c6-84d8-86b8361adfbd\") " pod="openshift-console-operator/console-operator-58897d9998-kzpjg" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.565154 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f1f83bd3-b657-494d-86b4-0981418d9c8d-proxy-tls\") pod \"machine-config-controller-84d6567774-22hnh\" (UID: \"f1f83bd3-b657-494d-86b4-0981418d9c8d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22hnh" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.565188 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wf55v\" (UniqueName: \"kubernetes.io/projected/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-kube-api-access-wf55v\") pod \"controller-manager-879f6c89f-dz4cq\" (UID: \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.565273 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/26aa0d14-edd4-44c6-84d8-86b8361adfbd-config\") pod \"console-operator-58897d9998-kzpjg\" (UID: \"26aa0d14-edd4-44c6-84d8-86b8361adfbd\") " pod="openshift-console-operator/console-operator-58897d9998-kzpjg" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.565302 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39822312-6707-4de1-8cc6-5ab1f513ebf7-serving-cert\") pod \"route-controller-manager-6576b87f9c-42ml5\" (UID: \"39822312-6707-4de1-8cc6-5ab1f513ebf7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.565326 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-serving-cert\") pod \"controller-manager-879f6c89f-dz4cq\" (UID: \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.565354 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.565385 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-client-ca\") pod \"controller-manager-879f6c89f-dz4cq\" (UID: \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.565423 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e668a4f5-eb70-407c-91de-67acaaa1e03a-trusted-ca-bundle\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.565450 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-dz4cq\" (UID: \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.565475 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.565504 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/5ba25763-8b03-46c7-bc29-e401dd42266c-etcd-service-ca\") pod \"etcd-operator-b45778765-vs6k9\" (UID: \"5ba25763-8b03-46c7-bc29-e401dd42266c\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-vs6k9" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.565516 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-config\") pod \"controller-manager-879f6c89f-dz4cq\" (UID: \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.565529 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e668a4f5-eb70-407c-91de-67acaaa1e03a-etcd-serving-ca\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.565557 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5c2333c-edb3-4c41-9780-f3bc5c60ce18-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-r49jw\" (UID: \"c5c2333c-edb3-4c41-9780-f3bc5c60ce18\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r49jw" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.563711 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.566190 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb575284-6b4b-49ab-b314-ba85d494ef6c-config\") pod \"machine-api-operator-5694c8668f-zhchl\" (UID: \"cb575284-6b4b-49ab-b314-ba85d494ef6c\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zhchl" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.566254 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5ba25763-8b03-46c7-bc29-e401dd42266c-etcd-client\") pod \"etcd-operator-b45778765-vs6k9\" (UID: \"5ba25763-8b03-46c7-bc29-e401dd42266c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vs6k9" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.566282 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cb575284-6b4b-49ab-b314-ba85d494ef6c-images\") pod \"machine-api-operator-5694c8668f-zhchl\" (UID: \"cb575284-6b4b-49ab-b314-ba85d494ef6c\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zhchl" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.566291 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87668f27-58db-4e11-b03d-9e9c5fd8f35c-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-h99r2\" (UID: \"87668f27-58db-4e11-b03d-9e9c5fd8f35c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h99r2" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.566341 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/87668f27-58db-4e11-b03d-9e9c5fd8f35c-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-h99r2\" (UID: 
\"87668f27-58db-4e11-b03d-9e9c5fd8f35c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h99r2" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.566363 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpp85\" (UniqueName: \"kubernetes.io/projected/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-kube-api-access-hpp85\") pod \"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.566408 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/39822312-6707-4de1-8cc6-5ab1f513ebf7-client-ca\") pod \"route-controller-manager-6576b87f9c-42ml5\" (UID: \"39822312-6707-4de1-8cc6-5ab1f513ebf7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.566431 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hlj6\" (UniqueName: \"kubernetes.io/projected/f1f83bd3-b657-494d-86b4-0981418d9c8d-kube-api-access-4hlj6\") pod \"machine-config-controller-84d6567774-22hnh\" (UID: \"f1f83bd3-b657-494d-86b4-0981418d9c8d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22hnh" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.566448 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-etcd-client\") pod \"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.567074 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqwp2\" (UniqueName: \"kubernetes.io/projected/0221af58-b758-43bd-8c44-5af77ab00967-kube-api-access-kqwp2\") pod \"service-ca-9c57cc56f-jt4rr\" (UID: \"0221af58-b758-43bd-8c44-5af77ab00967\") " pod="openshift-service-ca/service-ca-9c57cc56f-jt4rr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.567131 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e668a4f5-eb70-407c-91de-67acaaa1e03a-audit-dir\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.567162 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8106ce05-fe62-4b3e-93fd-0acb505d4e4b-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-g2vzr\" (UID: \"8106ce05-fe62-4b3e-93fd-0acb505d4e4b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g2vzr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.567225 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khdpt\" (UniqueName: \"kubernetes.io/projected/209b3283-9e92-472c-bc99-10d9d56788b2-kube-api-access-khdpt\") pod \"openshift-apiserver-operator-796bbdcf4f-w9bvf\" (UID: 
\"209b3283-9e92-472c-bc99-10d9d56788b2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-w9bvf" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.567268 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnn6q\" (UniqueName: \"kubernetes.io/projected/36a8e839-34d3-48ad-908e-4bcc8c996cca-kube-api-access-pnn6q\") pod \"machine-approver-56656f9798-dvxl6\" (UID: \"36a8e839-34d3-48ad-908e-4bcc8c996cca\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dvxl6" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.567503 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/940eecee-282d-4cf5-b296-0a9b63ea3d95-available-featuregates\") pod \"openshift-config-operator-7777fb866f-lctf8\" (UID: \"940eecee-282d-4cf5-b296-0a9b63ea3d95\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lctf8" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.567543 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/26aa0d14-edd4-44c6-84d8-86b8361adfbd-trusted-ca\") pod \"console-operator-58897d9998-kzpjg\" (UID: \"26aa0d14-edd4-44c6-84d8-86b8361adfbd\") " pod="openshift-console-operator/console-operator-58897d9998-kzpjg" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.567767 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f1f83bd3-b657-494d-86b4-0981418d9c8d-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-22hnh\" (UID: \"f1f83bd3-b657-494d-86b4-0981418d9c8d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22hnh" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.567839 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/940eecee-282d-4cf5-b296-0a9b63ea3d95-available-featuregates\") pod \"openshift-config-operator-7777fb866f-lctf8\" (UID: \"940eecee-282d-4cf5-b296-0a9b63ea3d95\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lctf8" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.567838 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfjks\" (UniqueName: \"kubernetes.io/projected/8106ce05-fe62-4b3e-93fd-0acb505d4e4b-kube-api-access-hfjks\") pod \"openshift-controller-manager-operator-756b6f6bc6-g2vzr\" (UID: \"8106ce05-fe62-4b3e-93fd-0acb505d4e4b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g2vzr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.567966 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/209b3283-9e92-472c-bc99-10d9d56788b2-config\") pod \"openshift-apiserver-operator-796bbdcf4f-w9bvf\" (UID: \"209b3283-9e92-472c-bc99-10d9d56788b2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-w9bvf" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.568059 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/387582b8-e641-4519-a20f-794c23707b51-metrics-tls\") pod 
\"dns-operator-744455d44c-cvxjv\" (UID: \"387582b8-e641-4519-a20f-794c23707b51\") " pod="openshift-dns-operator/dns-operator-744455d44c-cvxjv" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.568113 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ba25763-8b03-46c7-bc29-e401dd42266c-config\") pod \"etcd-operator-b45778765-vs6k9\" (UID: \"5ba25763-8b03-46c7-bc29-e401dd42266c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vs6k9" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.568144 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36a8e839-34d3-48ad-908e-4bcc8c996cca-config\") pod \"machine-approver-56656f9798-dvxl6\" (UID: \"36a8e839-34d3-48ad-908e-4bcc8c996cca\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dvxl6" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.568199 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87668f27-58db-4e11-b03d-9e9c5fd8f35c-config\") pod \"kube-apiserver-operator-766d6c64bb-h99r2\" (UID: \"87668f27-58db-4e11-b03d-9e9c5fd8f35c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h99r2" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.568217 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/39822312-6707-4de1-8cc6-5ab1f513ebf7-client-ca\") pod \"route-controller-manager-6576b87f9c-42ml5\" (UID: \"39822312-6707-4de1-8cc6-5ab1f513ebf7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.568505 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-serving-cert\") pod \"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.568582 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/209b3283-9e92-472c-bc99-10d9d56788b2-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-w9bvf\" (UID: \"209b3283-9e92-472c-bc99-10d9d56788b2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-w9bvf" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.568611 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-encryption-config\") pod \"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.568674 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rs2m\" (UniqueName: \"kubernetes.io/projected/5f2f6be3-04c5-4f5f-a545-b30d9e652982-kube-api-access-6rs2m\") pod \"olm-operator-6b444d44fb-8xdnr\" (UID: \"5f2f6be3-04c5-4f5f-a545-b30d9e652982\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8xdnr" Jan 21 17:58:35 crc 
kubenswrapper[4792]: I0121 17:58:35.568776 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ba25763-8b03-46c7-bc29-e401dd42266c-config\") pod \"etcd-operator-b45778765-vs6k9\" (UID: \"5ba25763-8b03-46c7-bc29-e401dd42266c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vs6k9" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.568992 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/5ba25763-8b03-46c7-bc29-e401dd42266c-etcd-ca\") pod \"etcd-operator-b45778765-vs6k9\" (UID: \"5ba25763-8b03-46c7-bc29-e401dd42266c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vs6k9" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.569009 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36a8e839-34d3-48ad-908e-4bcc8c996cca-config\") pod \"machine-approver-56656f9798-dvxl6\" (UID: \"36a8e839-34d3-48ad-908e-4bcc8c996cca\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dvxl6" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.569029 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/e668a4f5-eb70-407c-91de-67acaaa1e03a-audit\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.569428 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/5ba25763-8b03-46c7-bc29-e401dd42266c-etcd-service-ca\") pod \"etcd-operator-b45778765-vs6k9\" (UID: \"5ba25763-8b03-46c7-bc29-e401dd42266c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vs6k9" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.569480 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/0221af58-b758-43bd-8c44-5af77ab00967-signing-cabundle\") pod \"service-ca-9c57cc56f-jt4rr\" (UID: \"0221af58-b758-43bd-8c44-5af77ab00967\") " pod="openshift-service-ca/service-ca-9c57cc56f-jt4rr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.569737 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5f2f6be3-04c5-4f5f-a545-b30d9e652982-srv-cert\") pod \"olm-operator-6b444d44fb-8xdnr\" (UID: \"5f2f6be3-04c5-4f5f-a545-b30d9e652982\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8xdnr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.569964 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckwfz\" (UniqueName: \"kubernetes.io/projected/39822312-6707-4de1-8cc6-5ab1f513ebf7-kube-api-access-ckwfz\") pod \"route-controller-manager-6576b87f9c-42ml5\" (UID: \"39822312-6707-4de1-8cc6-5ab1f513ebf7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.570126 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/e668a4f5-eb70-407c-91de-67acaaa1e03a-image-import-ca\") pod \"apiserver-76f77b778f-q7mdj\" 
(UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.570249 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ba25763-8b03-46c7-bc29-e401dd42266c-serving-cert\") pod \"etcd-operator-b45778765-vs6k9\" (UID: \"5ba25763-8b03-46c7-bc29-e401dd42266c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vs6k9" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.570372 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-audit-dir\") pod \"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.570503 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9pck\" (UniqueName: \"kubernetes.io/projected/5ba25763-8b03-46c7-bc29-e401dd42266c-kube-api-access-k9pck\") pod \"etcd-operator-b45778765-vs6k9\" (UID: \"5ba25763-8b03-46c7-bc29-e401dd42266c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vs6k9" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.570649 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c5c2333c-edb3-4c41-9780-f3bc5c60ce18-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-r49jw\" (UID: \"c5c2333c-edb3-4c41-9780-f3bc5c60ce18\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r49jw" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.570842 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/cb575284-6b4b-49ab-b314-ba85d494ef6c-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-zhchl\" (UID: \"cb575284-6b4b-49ab-b314-ba85d494ef6c\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zhchl" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.570958 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5f2f6be3-04c5-4f5f-a545-b30d9e652982-profile-collector-cert\") pod \"olm-operator-6b444d44fb-8xdnr\" (UID: \"5f2f6be3-04c5-4f5f-a545-b30d9e652982\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8xdnr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.570982 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/940eecee-282d-4cf5-b296-0a9b63ea3d95-serving-cert\") pod \"openshift-config-operator-7777fb866f-lctf8\" (UID: \"940eecee-282d-4cf5-b296-0a9b63ea3d95\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lctf8" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.570745 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/5ba25763-8b03-46c7-bc29-e401dd42266c-etcd-ca\") pod \"etcd-operator-b45778765-vs6k9\" (UID: \"5ba25763-8b03-46c7-bc29-e401dd42266c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vs6k9" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.570891 
4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/36a8e839-34d3-48ad-908e-4bcc8c996cca-machine-approver-tls\") pod \"machine-approver-56656f9798-dvxl6\" (UID: \"36a8e839-34d3-48ad-908e-4bcc8c996cca\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dvxl6" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.571047 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39822312-6707-4de1-8cc6-5ab1f513ebf7-config\") pod \"route-controller-manager-6576b87f9c-42ml5\" (UID: \"39822312-6707-4de1-8cc6-5ab1f513ebf7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.571109 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8106ce05-fe62-4b3e-93fd-0acb505d4e4b-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-g2vzr\" (UID: \"8106ce05-fe62-4b3e-93fd-0acb505d4e4b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g2vzr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.571134 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5c2333c-edb3-4c41-9780-f3bc5c60ce18-config\") pod \"kube-controller-manager-operator-78b949d7b-r49jw\" (UID: \"c5c2333c-edb3-4c41-9780-f3bc5c60ce18\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r49jw" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.571164 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e668a4f5-eb70-407c-91de-67acaaa1e03a-node-pullsecrets\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.571190 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/36a8e839-34d3-48ad-908e-4bcc8c996cca-auth-proxy-config\") pod \"machine-approver-56656f9798-dvxl6\" (UID: \"36a8e839-34d3-48ad-908e-4bcc8c996cca\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dvxl6" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.569961 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/209b3283-9e92-472c-bc99-10d9d56788b2-config\") pod \"openshift-apiserver-operator-796bbdcf4f-w9bvf\" (UID: \"209b3283-9e92-472c-bc99-10d9d56788b2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-w9bvf" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.571211 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-audit-policies\") pod \"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.569999 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-machine-api/machine-api-operator-5694c8668f-zhchl"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.571788 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-dz4cq\" (UID: \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.571831 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/36a8e839-34d3-48ad-908e-4bcc8c996cca-auth-proxy-config\") pod \"machine-approver-56656f9798-dvxl6\" (UID: \"36a8e839-34d3-48ad-908e-4bcc8c996cca\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dvxl6" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.572015 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-serving-cert\") pod \"controller-manager-879f6c89f-dz4cq\" (UID: \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.572231 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-client-ca\") pod \"controller-manager-879f6c89f-dz4cq\" (UID: \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.572393 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39822312-6707-4de1-8cc6-5ab1f513ebf7-config\") pod \"route-controller-manager-6576b87f9c-42ml5\" (UID: \"39822312-6707-4de1-8cc6-5ab1f513ebf7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.572506 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5ba25763-8b03-46c7-bc29-e401dd42266c-etcd-client\") pod \"etcd-operator-b45778765-vs6k9\" (UID: \"5ba25763-8b03-46c7-bc29-e401dd42266c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vs6k9" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.573133 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39822312-6707-4de1-8cc6-5ab1f513ebf7-serving-cert\") pod \"route-controller-manager-6576b87f9c-42ml5\" (UID: \"39822312-6707-4de1-8cc6-5ab1f513ebf7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.574593 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/387582b8-e641-4519-a20f-794c23707b51-metrics-tls\") pod \"dns-operator-744455d44c-cvxjv\" (UID: \"387582b8-e641-4519-a20f-794c23707b51\") " pod="openshift-dns-operator/dns-operator-744455d44c-cvxjv" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.576946 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-7v9hk"] Jan 21 17:58:35 crc 
kubenswrapper[4792]: I0121 17:58:35.576995 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-22hnh"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.578037 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-kzpjg"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.580366 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h99r2"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.584931 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ba25763-8b03-46c7-bc29-e401dd42266c-serving-cert\") pod \"etcd-operator-b45778765-vs6k9\" (UID: \"5ba25763-8b03-46c7-bc29-e401dd42266c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vs6k9" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.586316 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/209b3283-9e92-472c-bc99-10d9d56788b2-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-w9bvf\" (UID: \"209b3283-9e92-472c-bc99-10d9d56788b2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-w9bvf" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.586379 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8xdnr"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.587863 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-mrvfm"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.588037 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.589054 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-q7mdj"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.590193 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-6nxvx"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.590733 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb575284-6b4b-49ab-b314-ba85d494ef6c-config\") pod \"machine-api-operator-5694c8668f-zhchl\" (UID: \"cb575284-6b4b-49ab-b314-ba85d494ef6c\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zhchl" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.591112 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-6nxvx" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.592927 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-d869q"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.593786 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7w8k4"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.595554 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-xpn4x"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.596877 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4nscl"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.597990 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5jx5t"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.600169 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f659j"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.600558 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.602471 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r49jw"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.603265 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-jt4rr"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.603324 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.606196 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g2vzr"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.608555 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-bxqhq"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.609813 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-srb5s"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.612972 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xscbt"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.624073 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.628158 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-65vgm"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.631190 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-ts9kq"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.633071 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-w784k"] Jan 21 17:58:35 crc 
kubenswrapper[4792]: I0121 17:58:35.633571 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.633758 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-w784k" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.634703 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-qq7s6"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.636529 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ft2zn"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.638055 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-ts9kq"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.639292 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-r2mgl"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.640604 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483625-rljlr"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.641937 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nm5x9"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.643656 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.643696 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2zfjc"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.645577 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-p8ffl"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.647091 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bdt7b"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.648610 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-6nxvx"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.650049 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-w784k"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.651436 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-sgdgm"] Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.652756 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-sgdgm" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.664512 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.671924 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/26aa0d14-edd4-44c6-84d8-86b8361adfbd-trusted-ca\") pod \"console-operator-58897d9998-kzpjg\" (UID: \"26aa0d14-edd4-44c6-84d8-86b8361adfbd\") " pod="openshift-console-operator/console-operator-58897d9998-kzpjg" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.671972 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f1f83bd3-b657-494d-86b4-0981418d9c8d-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-22hnh\" (UID: \"f1f83bd3-b657-494d-86b4-0981418d9c8d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22hnh" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672003 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfjks\" (UniqueName: \"kubernetes.io/projected/8106ce05-fe62-4b3e-93fd-0acb505d4e4b-kube-api-access-hfjks\") pod \"openshift-controller-manager-operator-756b6f6bc6-g2vzr\" (UID: \"8106ce05-fe62-4b3e-93fd-0acb505d4e4b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g2vzr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672045 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87668f27-58db-4e11-b03d-9e9c5fd8f35c-config\") pod \"kube-apiserver-operator-766d6c64bb-h99r2\" (UID: \"87668f27-58db-4e11-b03d-9e9c5fd8f35c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h99r2" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672072 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-serving-cert\") pod \"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672114 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-encryption-config\") pod \"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672143 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rs2m\" (UniqueName: \"kubernetes.io/projected/5f2f6be3-04c5-4f5f-a545-b30d9e652982-kube-api-access-6rs2m\") pod \"olm-operator-6b444d44fb-8xdnr\" (UID: \"5f2f6be3-04c5-4f5f-a545-b30d9e652982\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8xdnr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672174 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/e668a4f5-eb70-407c-91de-67acaaa1e03a-audit\") pod \"apiserver-76f77b778f-q7mdj\" (UID: 
\"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672213 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/e668a4f5-eb70-407c-91de-67acaaa1e03a-image-import-ca\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672240 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/0221af58-b758-43bd-8c44-5af77ab00967-signing-cabundle\") pod \"service-ca-9c57cc56f-jt4rr\" (UID: \"0221af58-b758-43bd-8c44-5af77ab00967\") " pod="openshift-service-ca/service-ca-9c57cc56f-jt4rr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672263 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5f2f6be3-04c5-4f5f-a545-b30d9e652982-srv-cert\") pod \"olm-operator-6b444d44fb-8xdnr\" (UID: \"5f2f6be3-04c5-4f5f-a545-b30d9e652982\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8xdnr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672288 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-audit-dir\") pod \"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672320 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c5c2333c-edb3-4c41-9780-f3bc5c60ce18-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-r49jw\" (UID: \"c5c2333c-edb3-4c41-9780-f3bc5c60ce18\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r49jw" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672343 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5f2f6be3-04c5-4f5f-a545-b30d9e652982-profile-collector-cert\") pod \"olm-operator-6b444d44fb-8xdnr\" (UID: \"5f2f6be3-04c5-4f5f-a545-b30d9e652982\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8xdnr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672371 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8106ce05-fe62-4b3e-93fd-0acb505d4e4b-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-g2vzr\" (UID: \"8106ce05-fe62-4b3e-93fd-0acb505d4e4b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g2vzr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672396 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5c2333c-edb3-4c41-9780-f3bc5c60ce18-config\") pod \"kube-controller-manager-operator-78b949d7b-r49jw\" (UID: \"c5c2333c-edb3-4c41-9780-f3bc5c60ce18\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r49jw" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672425 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e668a4f5-eb70-407c-91de-67acaaa1e03a-node-pullsecrets\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672452 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-audit-policies\") pod \"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672482 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e668a4f5-eb70-407c-91de-67acaaa1e03a-config\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672506 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e668a4f5-eb70-407c-91de-67acaaa1e03a-etcd-client\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672536 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/26aa0d14-edd4-44c6-84d8-86b8361adfbd-serving-cert\") pod \"console-operator-58897d9998-kzpjg\" (UID: \"26aa0d14-edd4-44c6-84d8-86b8361adfbd\") " pod="openshift-console-operator/console-operator-58897d9998-kzpjg" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672564 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/0221af58-b758-43bd-8c44-5af77ab00967-signing-key\") pod \"service-ca-9c57cc56f-jt4rr\" (UID: \"0221af58-b758-43bd-8c44-5af77ab00967\") " pod="openshift-service-ca/service-ca-9c57cc56f-jt4rr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672610 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rshnk\" (UniqueName: \"kubernetes.io/projected/e668a4f5-eb70-407c-91de-67acaaa1e03a-kube-api-access-rshnk\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672638 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c181f61f-a9d3-4d0e-84d2-aa2bd560017b-secret-volume\") pod \"collect-profiles-29483625-rljlr\" (UID: \"c181f61f-a9d3-4d0e-84d2-aa2bd560017b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-rljlr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672674 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e668a4f5-eb70-407c-91de-67acaaa1e03a-serving-cert\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 
17:58:35.672701 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e668a4f5-eb70-407c-91de-67acaaa1e03a-encryption-config\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672729 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c181f61f-a9d3-4d0e-84d2-aa2bd560017b-config-volume\") pod \"collect-profiles-29483625-rljlr\" (UID: \"c181f61f-a9d3-4d0e-84d2-aa2bd560017b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-rljlr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672759 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frfhs\" (UniqueName: \"kubernetes.io/projected/c181f61f-a9d3-4d0e-84d2-aa2bd560017b-kube-api-access-frfhs\") pod \"collect-profiles-29483625-rljlr\" (UID: \"c181f61f-a9d3-4d0e-84d2-aa2bd560017b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-rljlr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672795 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26aa0d14-edd4-44c6-84d8-86b8361adfbd-config\") pod \"console-operator-58897d9998-kzpjg\" (UID: \"26aa0d14-edd4-44c6-84d8-86b8361adfbd\") " pod="openshift-console-operator/console-operator-58897d9998-kzpjg" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672825 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tm6p8\" (UniqueName: \"kubernetes.io/projected/26aa0d14-edd4-44c6-84d8-86b8361adfbd-kube-api-access-tm6p8\") pod \"console-operator-58897d9998-kzpjg\" (UID: \"26aa0d14-edd4-44c6-84d8-86b8361adfbd\") " pod="openshift-console-operator/console-operator-58897d9998-kzpjg" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672873 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f1f83bd3-b657-494d-86b4-0981418d9c8d-proxy-tls\") pod \"machine-config-controller-84d6567774-22hnh\" (UID: \"f1f83bd3-b657-494d-86b4-0981418d9c8d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22hnh" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672902 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672931 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e668a4f5-eb70-407c-91de-67acaaa1e03a-trusted-ca-bundle\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672963 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-etcd-serving-ca\") pod 
\"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.672992 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e668a4f5-eb70-407c-91de-67acaaa1e03a-etcd-serving-ca\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.673020 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87668f27-58db-4e11-b03d-9e9c5fd8f35c-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-h99r2\" (UID: \"87668f27-58db-4e11-b03d-9e9c5fd8f35c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h99r2" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.673047 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/87668f27-58db-4e11-b03d-9e9c5fd8f35c-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-h99r2\" (UID: \"87668f27-58db-4e11-b03d-9e9c5fd8f35c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h99r2" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.673071 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5c2333c-edb3-4c41-9780-f3bc5c60ce18-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-r49jw\" (UID: \"c5c2333c-edb3-4c41-9780-f3bc5c60ce18\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r49jw" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.673100 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hlj6\" (UniqueName: \"kubernetes.io/projected/f1f83bd3-b657-494d-86b4-0981418d9c8d-kube-api-access-4hlj6\") pod \"machine-config-controller-84d6567774-22hnh\" (UID: \"f1f83bd3-b657-494d-86b4-0981418d9c8d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22hnh" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.673127 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-etcd-client\") pod \"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.673153 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpp85\" (UniqueName: \"kubernetes.io/projected/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-kube-api-access-hpp85\") pod \"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.673179 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqwp2\" (UniqueName: \"kubernetes.io/projected/0221af58-b758-43bd-8c44-5af77ab00967-kube-api-access-kqwp2\") pod \"service-ca-9c57cc56f-jt4rr\" (UID: \"0221af58-b758-43bd-8c44-5af77ab00967\") " pod="openshift-service-ca/service-ca-9c57cc56f-jt4rr" Jan 21 17:58:35 crc 
kubenswrapper[4792]: I0121 17:58:35.673212 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/26aa0d14-edd4-44c6-84d8-86b8361adfbd-trusted-ca\") pod \"console-operator-58897d9998-kzpjg\" (UID: \"26aa0d14-edd4-44c6-84d8-86b8361adfbd\") " pod="openshift-console-operator/console-operator-58897d9998-kzpjg" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.673228 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e668a4f5-eb70-407c-91de-67acaaa1e03a-audit-dir\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.673256 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8106ce05-fe62-4b3e-93fd-0acb505d4e4b-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-g2vzr\" (UID: \"8106ce05-fe62-4b3e-93fd-0acb505d4e4b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g2vzr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.673264 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f1f83bd3-b657-494d-86b4-0981418d9c8d-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-22hnh\" (UID: \"f1f83bd3-b657-494d-86b4-0981418d9c8d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22hnh" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.673825 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.674122 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-audit-dir\") pod \"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.674395 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e668a4f5-eb70-407c-91de-67acaaa1e03a-node-pullsecrets\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.675127 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/e668a4f5-eb70-407c-91de-67acaaa1e03a-audit\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.675373 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87668f27-58db-4e11-b03d-9e9c5fd8f35c-config\") pod \"kube-apiserver-operator-766d6c64bb-h99r2\" (UID: \"87668f27-58db-4e11-b03d-9e9c5fd8f35c\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h99r2" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.675765 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e668a4f5-eb70-407c-91de-67acaaa1e03a-audit-dir\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.676388 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/0221af58-b758-43bd-8c44-5af77ab00967-signing-cabundle\") pod \"service-ca-9c57cc56f-jt4rr\" (UID: \"0221af58-b758-43bd-8c44-5af77ab00967\") " pod="openshift-service-ca/service-ca-9c57cc56f-jt4rr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.676795 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e668a4f5-eb70-407c-91de-67acaaa1e03a-trusted-ca-bundle\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.676864 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26aa0d14-edd4-44c6-84d8-86b8361adfbd-config\") pod \"console-operator-58897d9998-kzpjg\" (UID: \"26aa0d14-edd4-44c6-84d8-86b8361adfbd\") " pod="openshift-console-operator/console-operator-58897d9998-kzpjg" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.676892 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c181f61f-a9d3-4d0e-84d2-aa2bd560017b-config-volume\") pod \"collect-profiles-29483625-rljlr\" (UID: \"c181f61f-a9d3-4d0e-84d2-aa2bd560017b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-rljlr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.677140 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/e668a4f5-eb70-407c-91de-67acaaa1e03a-image-import-ca\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.677653 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5c2333c-edb3-4c41-9780-f3bc5c60ce18-config\") pod \"kube-controller-manager-operator-78b949d7b-r49jw\" (UID: \"c5c2333c-edb3-4c41-9780-f3bc5c60ce18\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r49jw" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.678019 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e668a4f5-eb70-407c-91de-67acaaa1e03a-etcd-serving-ca\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.678180 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8106ce05-fe62-4b3e-93fd-0acb505d4e4b-config\") pod 
\"openshift-controller-manager-operator-756b6f6bc6-g2vzr\" (UID: \"8106ce05-fe62-4b3e-93fd-0acb505d4e4b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g2vzr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.679317 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c181f61f-a9d3-4d0e-84d2-aa2bd560017b-secret-volume\") pod \"collect-profiles-29483625-rljlr\" (UID: \"c181f61f-a9d3-4d0e-84d2-aa2bd560017b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-rljlr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.679611 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e668a4f5-eb70-407c-91de-67acaaa1e03a-etcd-client\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.680338 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5c2333c-edb3-4c41-9780-f3bc5c60ce18-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-r49jw\" (UID: \"c5c2333c-edb3-4c41-9780-f3bc5c60ce18\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r49jw" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.680402 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/0221af58-b758-43bd-8c44-5af77ab00967-signing-key\") pod \"service-ca-9c57cc56f-jt4rr\" (UID: \"0221af58-b758-43bd-8c44-5af77ab00967\") " pod="openshift-service-ca/service-ca-9c57cc56f-jt4rr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.680351 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5f2f6be3-04c5-4f5f-a545-b30d9e652982-srv-cert\") pod \"olm-operator-6b444d44fb-8xdnr\" (UID: \"5f2f6be3-04c5-4f5f-a545-b30d9e652982\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8xdnr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.680668 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e668a4f5-eb70-407c-91de-67acaaa1e03a-config\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.681024 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5f2f6be3-04c5-4f5f-a545-b30d9e652982-profile-collector-cert\") pod \"olm-operator-6b444d44fb-8xdnr\" (UID: \"5f2f6be3-04c5-4f5f-a545-b30d9e652982\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8xdnr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.681023 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87668f27-58db-4e11-b03d-9e9c5fd8f35c-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-h99r2\" (UID: \"87668f27-58db-4e11-b03d-9e9c5fd8f35c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h99r2" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.681396 4792 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e668a4f5-eb70-407c-91de-67acaaa1e03a-encryption-config\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.681641 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-audit-policies\") pod \"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.682087 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-encryption-config\") pod \"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.682409 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e668a4f5-eb70-407c-91de-67acaaa1e03a-serving-cert\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.682564 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.683067 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f1f83bd3-b657-494d-86b4-0981418d9c8d-proxy-tls\") pod \"machine-config-controller-84d6567774-22hnh\" (UID: \"f1f83bd3-b657-494d-86b4-0981418d9c8d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22hnh" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.683240 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-serving-cert\") pod \"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.683826 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/26aa0d14-edd4-44c6-84d8-86b8361adfbd-serving-cert\") pod \"console-operator-58897d9998-kzpjg\" (UID: \"26aa0d14-edd4-44c6-84d8-86b8361adfbd\") " pod="openshift-console-operator/console-operator-58897d9998-kzpjg" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.684392 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.684529 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-etcd-client\") pod \"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.686805 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8106ce05-fe62-4b3e-93fd-0acb505d4e4b-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-g2vzr\" (UID: \"8106ce05-fe62-4b3e-93fd-0acb505d4e4b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g2vzr" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.704668 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.764501 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.784329 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.804052 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.825641 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.844624 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.864024 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.884636 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.903780 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.924659 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.944832 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 21 17:58:35 crc kubenswrapper[4792]: I0121 17:58:35.983898 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.004865 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.006229 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.024904 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.044871 4792 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.064694 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.083976 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.104151 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.124921 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.144361 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.165033 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.183764 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.204233 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.224416 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.244074 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.263587 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.283231 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.303818 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.323737 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.344504 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.372939 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.383982 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.404378 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.424430 
4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.444321 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.463969 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.483950 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.504509 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.523990 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.544722 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.562266 4792 request.go:700] Waited for 1.014534662s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.579610 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.583599 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.604119 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.624827 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.644607 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.668072 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.685109 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.704619 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.736082 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.747685 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.764458 4792 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.812280 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.823583 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.830182 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.843656 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.864763 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.884562 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.905180 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.925024 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.944494 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.964104 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 21 17:58:36 crc kubenswrapper[4792]: I0121 17:58:36.988667 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.003732 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.030771 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.044196 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.072755 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.084415 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.105363 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.124287 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 21 17:58:37 crc 
kubenswrapper[4792]: I0121 17:58:37.144543 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.165020 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.200609 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8q2kk\" (UniqueName: \"kubernetes.io/projected/387582b8-e641-4519-a20f-794c23707b51-kube-api-access-8q2kk\") pod \"dns-operator-744455d44c-cvxjv\" (UID: \"387582b8-e641-4519-a20f-794c23707b51\") " pod="openshift-dns-operator/dns-operator-744455d44c-cvxjv" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.234451 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zsqj\" (UniqueName: \"kubernetes.io/projected/940eecee-282d-4cf5-b296-0a9b63ea3d95-kube-api-access-5zsqj\") pod \"openshift-config-operator-7777fb866f-lctf8\" (UID: \"940eecee-282d-4cf5-b296-0a9b63ea3d95\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lctf8" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.261788 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-br4rq\" (UniqueName: \"kubernetes.io/projected/cb575284-6b4b-49ab-b314-ba85d494ef6c-kube-api-access-br4rq\") pod \"machine-api-operator-5694c8668f-zhchl\" (UID: \"cb575284-6b4b-49ab-b314-ba85d494ef6c\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zhchl" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.265121 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wf55v\" (UniqueName: \"kubernetes.io/projected/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-kube-api-access-wf55v\") pod \"controller-manager-879f6c89f-dz4cq\" (UID: \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.274523 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-zhchl" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.280776 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khdpt\" (UniqueName: \"kubernetes.io/projected/209b3283-9e92-472c-bc99-10d9d56788b2-kube-api-access-khdpt\") pod \"openshift-apiserver-operator-796bbdcf4f-w9bvf\" (UID: \"209b3283-9e92-472c-bc99-10d9d56788b2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-w9bvf" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.301633 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnn6q\" (UniqueName: \"kubernetes.io/projected/36a8e839-34d3-48ad-908e-4bcc8c996cca-kube-api-access-pnn6q\") pod \"machine-approver-56656f9798-dvxl6\" (UID: \"36a8e839-34d3-48ad-908e-4bcc8c996cca\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dvxl6" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.319022 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-cvxjv" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.321637 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckwfz\" (UniqueName: \"kubernetes.io/projected/39822312-6707-4de1-8cc6-5ab1f513ebf7-kube-api-access-ckwfz\") pod \"route-controller-manager-6576b87f9c-42ml5\" (UID: \"39822312-6707-4de1-8cc6-5ab1f513ebf7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.341834 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9pck\" (UniqueName: \"kubernetes.io/projected/5ba25763-8b03-46c7-bc29-e401dd42266c-kube-api-access-k9pck\") pod \"etcd-operator-b45778765-vs6k9\" (UID: \"5ba25763-8b03-46c7-bc29-e401dd42266c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vs6k9" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.343999 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.364685 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.384293 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.406834 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.428154 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.444707 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.465010 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.476322 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.483939 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.498357 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dvxl6" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.507446 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.534567 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.534737 4792 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.534968 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lctf8" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.544080 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.551929 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-w9bvf" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.562472 4792 request.go:700] Waited for 1.909352057s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dmachine-config-server-tls&limit=500&resourceVersion=0 Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.564706 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 21 17:58:37 crc kubenswrapper[4792]: W0121 17:58:37.581973 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod36a8e839_34d3_48ad_908e_4bcc8c996cca.slice/crio-d7a0a81b7c1e8bcc2d3c1134c5bcbd432ff92714eedda50c4a9c482091a70fa5 WatchSource:0}: Error finding container d7a0a81b7c1e8bcc2d3c1134c5bcbd432ff92714eedda50c4a9c482091a70fa5: Status 404 returned error can't find the container with id d7a0a81b7c1e8bcc2d3c1134c5bcbd432ff92714eedda50c4a9c482091a70fa5 Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.584226 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.605415 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-vs6k9" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.623262 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c5c2333c-edb3-4c41-9780-f3bc5c60ce18-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-r49jw\" (UID: \"c5c2333c-edb3-4c41-9780-f3bc5c60ce18\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r49jw" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.695572 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfjks\" (UniqueName: \"kubernetes.io/projected/8106ce05-fe62-4b3e-93fd-0acb505d4e4b-kube-api-access-hfjks\") pod \"openshift-controller-manager-operator-756b6f6bc6-g2vzr\" (UID: \"8106ce05-fe62-4b3e-93fd-0acb505d4e4b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g2vzr" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.698108 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hlj6\" (UniqueName: \"kubernetes.io/projected/f1f83bd3-b657-494d-86b4-0981418d9c8d-kube-api-access-4hlj6\") pod \"machine-config-controller-84d6567774-22hnh\" (UID: \"f1f83bd3-b657-494d-86b4-0981418d9c8d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22hnh" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.726337 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rs2m\" (UniqueName: \"kubernetes.io/projected/5f2f6be3-04c5-4f5f-a545-b30d9e652982-kube-api-access-6rs2m\") pod \"olm-operator-6b444d44fb-8xdnr\" (UID: \"5f2f6be3-04c5-4f5f-a545-b30d9e652982\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8xdnr" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.733099 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8xdnr" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.831172 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g2vzr" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.836545 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r49jw" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.912049 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.912489 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjr2g\" (UniqueName: \"kubernetes.io/projected/68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5-kube-api-access-bjr2g\") pod \"packageserver-d55dfcdfc-5jx5t\" (UID: \"68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5jx5t" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.912523 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5-webhook-cert\") pod \"packageserver-d55dfcdfc-5jx5t\" (UID: \"68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5jx5t" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.912600 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbmrt\" (UniqueName: \"kubernetes.io/projected/1001cdf3-8e91-4e17-a8f7-a92b91daf23e-kube-api-access-fbmrt\") pod \"downloads-7954f5f757-xpn4x\" (UID: \"1001cdf3-8e91-4e17-a8f7-a92b91daf23e\") " pod="openshift-console/downloads-7954f5f757-xpn4x" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.912661 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/15a12100-5704-4b1b-b9db-4961709b2587-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.912687 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a4386205-1780-4ef2-a790-edab679ee8a7-trusted-ca\") pod \"ingress-operator-5b745b69d9-7v9hk\" (UID: \"a4386205-1780-4ef2-a790-edab679ee8a7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7v9hk" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.912710 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a4386205-1780-4ef2-a790-edab679ee8a7-bound-sa-token\") pod \"ingress-operator-5b745b69d9-7v9hk\" (UID: \"a4386205-1780-4ef2-a790-edab679ee8a7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7v9hk" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.912772 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kf97f\" (UniqueName: \"kubernetes.io/projected/3d05043f-32a0-4b55-ac55-3f48b3c25bc5-kube-api-access-kf97f\") pod \"multus-admission-controller-857f4d67dd-mrvfm\" (UID: 
\"3d05043f-32a0-4b55-ac55-3f48b3c25bc5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mrvfm" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.912823 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/15a12100-5704-4b1b-b9db-4961709b2587-registry-certificates\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.912844 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/15a12100-5704-4b1b-b9db-4961709b2587-trusted-ca\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.912938 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/15a12100-5704-4b1b-b9db-4961709b2587-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.912981 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjrq5\" (UniqueName: \"kubernetes.io/projected/15a12100-5704-4b1b-b9db-4961709b2587-kube-api-access-xjrq5\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.913005 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5-tmpfs\") pod \"packageserver-d55dfcdfc-5jx5t\" (UID: \"68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5jx5t" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.913029 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5-apiservice-cert\") pod \"packageserver-d55dfcdfc-5jx5t\" (UID: \"68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5jx5t" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.913056 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/15a12100-5704-4b1b-b9db-4961709b2587-registry-tls\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.913087 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a4386205-1780-4ef2-a790-edab679ee8a7-metrics-tls\") pod \"ingress-operator-5b745b69d9-7v9hk\" (UID: \"a4386205-1780-4ef2-a790-edab679ee8a7\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7v9hk" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.913124 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8tdc\" (UniqueName: \"kubernetes.io/projected/a4386205-1780-4ef2-a790-edab679ee8a7-kube-api-access-q8tdc\") pod \"ingress-operator-5b745b69d9-7v9hk\" (UID: \"a4386205-1780-4ef2-a790-edab679ee8a7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7v9hk" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.913147 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/3d05043f-32a0-4b55-ac55-3f48b3c25bc5-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-mrvfm\" (UID: \"3d05043f-32a0-4b55-ac55-3f48b3c25bc5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mrvfm" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.913180 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/15a12100-5704-4b1b-b9db-4961709b2587-bound-sa-token\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:37 crc kubenswrapper[4792]: E0121 17:58:37.913525 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:38.413511619 +0000 UTC m=+152.395474805 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.955652 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/87668f27-58db-4e11-b03d-9e9c5fd8f35c-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-h99r2\" (UID: \"87668f27-58db-4e11-b03d-9e9c5fd8f35c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h99r2" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.955938 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frfhs\" (UniqueName: \"kubernetes.io/projected/c181f61f-a9d3-4d0e-84d2-aa2bd560017b-kube-api-access-frfhs\") pod \"collect-profiles-29483625-rljlr\" (UID: \"c181f61f-a9d3-4d0e-84d2-aa2bd560017b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-rljlr" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.955961 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpp85\" (UniqueName: \"kubernetes.io/projected/7bf30f38-58cd-46e3-b3af-4f2c47124d2a-kube-api-access-hpp85\") pod \"apiserver-7bbb656c7d-4k5zt\" (UID: \"7bf30f38-58cd-46e3-b3af-4f2c47124d2a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.956098 4792 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tm6p8\" (UniqueName: \"kubernetes.io/projected/26aa0d14-edd4-44c6-84d8-86b8361adfbd-kube-api-access-tm6p8\") pod \"console-operator-58897d9998-kzpjg\" (UID: \"26aa0d14-edd4-44c6-84d8-86b8361adfbd\") " pod="openshift-console-operator/console-operator-58897d9998-kzpjg" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.956607 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rshnk\" (UniqueName: \"kubernetes.io/projected/e668a4f5-eb70-407c-91de-67acaaa1e03a-kube-api-access-rshnk\") pod \"apiserver-76f77b778f-q7mdj\" (UID: \"e668a4f5-eb70-407c-91de-67acaaa1e03a\") " pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.961135 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqwp2\" (UniqueName: \"kubernetes.io/projected/0221af58-b758-43bd-8c44-5af77ab00967-kube-api-access-kqwp2\") pod \"service-ca-9c57cc56f-jt4rr\" (UID: \"0221af58-b758-43bd-8c44-5af77ab00967\") " pod="openshift-service-ca/service-ca-9c57cc56f-jt4rr" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.968458 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22hnh" Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.970050 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-cvxjv"] Jan 21 17:58:37 crc kubenswrapper[4792]: I0121 17:58:37.979139 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-zhchl"] Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.014417 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:38 crc kubenswrapper[4792]: E0121 17:58:38.014723 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:38.514697495 +0000 UTC m=+152.496660681 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.014784 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kf97f\" (UniqueName: \"kubernetes.io/projected/3d05043f-32a0-4b55-ac55-3f48b3c25bc5-kube-api-access-kf97f\") pod \"multus-admission-controller-857f4d67dd-mrvfm\" (UID: \"3d05043f-32a0-4b55-ac55-3f48b3c25bc5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mrvfm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.014878 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/15a12100-5704-4b1b-b9db-4961709b2587-registry-certificates\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.014902 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/15a12100-5704-4b1b-b9db-4961709b2587-trusted-ca\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.014965 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.014993 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.015021 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.015081 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dcrj\" (UniqueName: \"kubernetes.io/projected/aa38141d-14db-436f-b9f7-9270a301ef1e-kube-api-access-7dcrj\") pod \"csi-hostpathplugin-ts9kq\" (UID: \"aa38141d-14db-436f-b9f7-9270a301ef1e\") " pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" Jan 21 17:58:38 
crc kubenswrapper[4792]: I0121 17:58:38.015144 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sggwj\" (UniqueName: \"kubernetes.io/projected/a0ea4efe-8d03-4090-b537-344474f2cdac-kube-api-access-sggwj\") pod \"dns-default-6nxvx\" (UID: \"a0ea4efe-8d03-4090-b537-344474f2cdac\") " pod="openshift-dns/dns-default-6nxvx" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.015171 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/4a02b713-1a2c-43d9-9ed2-de57e40a2364-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-r2mgl\" (UID: \"4a02b713-1a2c-43d9-9ed2-de57e40a2364\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-r2mgl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.015199 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5be3fc9c-cfde-41ee-8383-02800d3990cd-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-srb5s\" (UID: \"5be3fc9c-cfde-41ee-8383-02800d3990cd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-srb5s" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.015227 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a0ea4efe-8d03-4090-b537-344474f2cdac-metrics-tls\") pod \"dns-default-6nxvx\" (UID: \"a0ea4efe-8d03-4090-b537-344474f2cdac\") " pod="openshift-dns/dns-default-6nxvx" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.015259 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjrq5\" (UniqueName: \"kubernetes.io/projected/15a12100-5704-4b1b-b9db-4961709b2587-kube-api-access-xjrq5\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.015285 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d14cc544-196a-41ec-aa6e-ffca799481e7-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-4nscl\" (UID: \"d14cc544-196a-41ec-aa6e-ffca799481e7\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4nscl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.015332 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5-tmpfs\") pod \"packageserver-d55dfcdfc-5jx5t\" (UID: \"68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5jx5t" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.015388 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8faafed-92b2-4ebc-9d37-9b7fc1d32067-config\") pod \"service-ca-operator-777779d784-qq7s6\" (UID: \"f8faafed-92b2-4ebc-9d37-9b7fc1d32067\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-qq7s6" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.015414 4792 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1166298c-e29b-4c0a-b153-d92acf85b0d2-service-ca\") pod \"console-f9d7485db-65vgm\" (UID: \"1166298c-e29b-4c0a-b153-d92acf85b0d2\") " pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.015477 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a15d3491-6301-49f2-a196-df5db956aa82-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bdt7b\" (UID: \"a15d3491-6301-49f2-a196-df5db956aa82\") " pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.015505 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/15a12100-5704-4b1b-b9db-4961709b2587-registry-tls\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.015546 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5be3fc9c-cfde-41ee-8383-02800d3990cd-config\") pod \"authentication-operator-69f744f599-srb5s\" (UID: \"5be3fc9c-cfde-41ee-8383-02800d3990cd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-srb5s" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.015577 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dn9ql\" (UniqueName: \"kubernetes.io/projected/bdbd468f-304e-429c-9957-e60c6e756db4-kube-api-access-dn9ql\") pod \"machine-config-operator-74547568cd-d869q\" (UID: \"bdbd468f-304e-429c-9957-e60c6e756db4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d869q" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.015600 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/148627d0-5541-4e87-9b4c-cf22303d76b5-cert\") pod \"ingress-canary-w784k\" (UID: \"148627d0-5541-4e87-9b4c-cf22303d76b5\") " pod="openshift-ingress-canary/ingress-canary-w784k" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.015630 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.015659 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1166298c-e29b-4c0a-b153-d92acf85b0d2-console-config\") pod \"console-f9d7485db-65vgm\" (UID: \"1166298c-e29b-4c0a-b153-d92acf85b0d2\") " pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.015724 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: 
\"kubernetes.io/secret/3d05043f-32a0-4b55-ac55-3f48b3c25bc5-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-mrvfm\" (UID: \"3d05043f-32a0-4b55-ac55-3f48b3c25bc5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mrvfm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.015751 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxczd\" (UniqueName: \"kubernetes.io/projected/5be3fc9c-cfde-41ee-8383-02800d3990cd-kube-api-access-sxczd\") pod \"authentication-operator-69f744f599-srb5s\" (UID: \"5be3fc9c-cfde-41ee-8383-02800d3990cd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-srb5s" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.015775 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d14cc544-196a-41ec-aa6e-ffca799481e7-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-4nscl\" (UID: \"d14cc544-196a-41ec-aa6e-ffca799481e7\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4nscl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.015986 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvfks\" (UniqueName: \"kubernetes.io/projected/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-kube-api-access-jvfks\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.016024 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc380d9c-2633-4904-8225-bd456fdb103b-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-7w8k4\" (UID: \"cc380d9c-2633-4904-8225-bd456fdb103b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7w8k4" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.016058 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjr2g\" (UniqueName: \"kubernetes.io/projected/68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5-kube-api-access-bjr2g\") pod \"packageserver-d55dfcdfc-5jx5t\" (UID: \"68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5jx5t" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.016108 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/526cf46b-9c0e-4ea2-850f-e9fba77a9449-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-f659j\" (UID: \"526cf46b-9c0e-4ea2-850f-e9fba77a9449\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f659j" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.016133 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a0ea4efe-8d03-4090-b537-344474f2cdac-config-volume\") pod \"dns-default-6nxvx\" (UID: \"a0ea4efe-8d03-4090-b537-344474f2cdac\") " pod="openshift-dns/dns-default-6nxvx" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.016195 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-8n6w5\" (UniqueName: \"kubernetes.io/projected/d14cc544-196a-41ec-aa6e-ffca799481e7-kube-api-access-8n6w5\") pod \"cluster-image-registry-operator-dc59b4c8b-4nscl\" (UID: \"d14cc544-196a-41ec-aa6e-ffca799481e7\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4nscl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.016254 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2dc72b61-a3c4-4a8a-b969-94f9a9036946-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2zfjc\" (UID: \"2dc72b61-a3c4-4a8a-b969-94f9a9036946\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2zfjc" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.016281 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnjgq\" (UniqueName: \"kubernetes.io/projected/37e5c807-b97c-4317-83a8-8266feac9df8-kube-api-access-jnjgq\") pod \"machine-config-server-sgdgm\" (UID: \"37e5c807-b97c-4317-83a8-8266feac9df8\") " pod="openshift-machine-config-operator/machine-config-server-sgdgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.016311 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wv447\" (UniqueName: \"kubernetes.io/projected/4a02b713-1a2c-43d9-9ed2-de57e40a2364-kube-api-access-wv447\") pod \"control-plane-machine-set-operator-78cbb6b69f-r2mgl\" (UID: \"4a02b713-1a2c-43d9-9ed2-de57e40a2364\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-r2mgl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.016344 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbmrt\" (UniqueName: \"kubernetes.io/projected/1001cdf3-8e91-4e17-a8f7-a92b91daf23e-kube-api-access-fbmrt\") pod \"downloads-7954f5f757-xpn4x\" (UID: \"1001cdf3-8e91-4e17-a8f7-a92b91daf23e\") " pod="openshift-console/downloads-7954f5f757-xpn4x" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.016372 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjmg9\" (UniqueName: \"kubernetes.io/projected/72615ab0-4d57-4849-89f9-3f660e97825b-kube-api-access-bjmg9\") pod \"migrator-59844c95c7-p8ffl\" (UID: \"72615ab0-4d57-4849-89f9-3f660e97825b\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-p8ffl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.016729 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/15a12100-5704-4b1b-b9db-4961709b2587-trusted-ca\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.016810 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dc72b61-a3c4-4a8a-b969-94f9a9036946-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2zfjc\" (UID: \"2dc72b61-a3c4-4a8a-b969-94f9a9036946\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2zfjc" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.016895 4792 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gsxl\" (UniqueName: \"kubernetes.io/projected/c705c522-8d1a-4f0e-9d7a-671489591029-kube-api-access-4gsxl\") pod \"package-server-manager-789f6589d5-ft2zn\" (UID: \"c705c522-8d1a-4f0e-9d7a-671489591029\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ft2zn" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.016923 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hbkp\" (UniqueName: \"kubernetes.io/projected/a15d3491-6301-49f2-a196-df5db956aa82-kube-api-access-5hbkp\") pod \"marketplace-operator-79b997595-bdt7b\" (UID: \"a15d3491-6301-49f2-a196-df5db956aa82\") " pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.016972 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8faafed-92b2-4ebc-9d37-9b7fc1d32067-serving-cert\") pod \"service-ca-operator-777779d784-qq7s6\" (UID: \"f8faafed-92b2-4ebc-9d37-9b7fc1d32067\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-qq7s6" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.017033 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42lfw\" (UniqueName: \"kubernetes.io/projected/526cf46b-9c0e-4ea2-850f-e9fba77a9449-kube-api-access-42lfw\") pod \"cluster-samples-operator-665b6dd947-f659j\" (UID: \"526cf46b-9c0e-4ea2-850f-e9fba77a9449\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f659j" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.017120 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bb4w8\" (UniqueName: \"kubernetes.io/projected/4352f184-e8fe-408e-b28b-db2cd474622f-kube-api-access-bb4w8\") pod \"catalog-operator-68c6474976-nm5x9\" (UID: \"4352f184-e8fe-408e-b28b-db2cd474622f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nm5x9" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.017164 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/d14cc544-196a-41ec-aa6e-ffca799481e7-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-4nscl\" (UID: \"d14cc544-196a-41ec-aa6e-ffca799481e7\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4nscl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.017192 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.017256 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1166298c-e29b-4c0a-b153-d92acf85b0d2-console-serving-cert\") pod \"console-f9d7485db-65vgm\" (UID: \"1166298c-e29b-4c0a-b153-d92acf85b0d2\") " pod="openshift-console/console-f9d7485db-65vgm" 
Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.017287 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/15a12100-5704-4b1b-b9db-4961709b2587-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.017317 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9zbr\" (UniqueName: \"kubernetes.io/projected/1166298c-e29b-4c0a-b153-d92acf85b0d2-kube-api-access-c9zbr\") pod \"console-f9d7485db-65vgm\" (UID: \"1166298c-e29b-4c0a-b153-d92acf85b0d2\") " pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.017348 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/aa38141d-14db-436f-b9f7-9270a301ef1e-csi-data-dir\") pod \"csi-hostpathplugin-ts9kq\" (UID: \"aa38141d-14db-436f-b9f7-9270a301ef1e\") " pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.017394 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7nd6\" (UniqueName: \"kubernetes.io/projected/cc380d9c-2633-4904-8225-bd456fdb103b-kube-api-access-h7nd6\") pod \"kube-storage-version-migrator-operator-b67b599dd-7w8k4\" (UID: \"cc380d9c-2633-4904-8225-bd456fdb103b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7w8k4" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.017464 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5-tmpfs\") pod \"packageserver-d55dfcdfc-5jx5t\" (UID: \"68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5jx5t" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.017751 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/15a12100-5704-4b1b-b9db-4961709b2587-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.019001 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h99r2" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.022173 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1166298c-e29b-4c0a-b153-d92acf85b0d2-trusted-ca-bundle\") pod \"console-f9d7485db-65vgm\" (UID: \"1166298c-e29b-4c0a-b153-d92acf85b0d2\") " pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.022211 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-audit-dir\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.022253 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2dc72b61-a3c4-4a8a-b969-94f9a9036946-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2zfjc\" (UID: \"2dc72b61-a3c4-4a8a-b969-94f9a9036946\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2zfjc" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.022277 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/aa38141d-14db-436f-b9f7-9270a301ef1e-socket-dir\") pod \"csi-hostpathplugin-ts9kq\" (UID: \"aa38141d-14db-436f-b9f7-9270a301ef1e\") " pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.022299 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/15a12100-5704-4b1b-b9db-4961709b2587-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.022497 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/37e5c807-b97c-4317-83a8-8266feac9df8-node-bootstrap-token\") pod \"machine-config-server-sgdgm\" (UID: \"37e5c807-b97c-4317-83a8-8266feac9df8\") " pod="openshift-machine-config-operator/machine-config-server-sgdgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.022522 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-audit-policies\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.022540 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a15d3491-6301-49f2-a196-df5db956aa82-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bdt7b\" (UID: \"a15d3491-6301-49f2-a196-df5db956aa82\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.022577 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bdbd468f-304e-429c-9957-e60c6e756db4-proxy-tls\") pod \"machine-config-operator-74547568cd-d869q\" (UID: \"bdbd468f-304e-429c-9957-e60c6e756db4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d869q" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.022593 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.022618 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5-apiservice-cert\") pod \"packageserver-d55dfcdfc-5jx5t\" (UID: \"68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5jx5t" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.022636 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5be3fc9c-cfde-41ee-8383-02800d3990cd-service-ca-bundle\") pod \"authentication-operator-69f744f599-srb5s\" (UID: \"5be3fc9c-cfde-41ee-8383-02800d3990cd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-srb5s" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.022657 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/c705c522-8d1a-4f0e-9d7a-671489591029-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-ft2zn\" (UID: \"c705c522-8d1a-4f0e-9d7a-671489591029\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ft2zn" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.022686 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.022705 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/4352f184-e8fe-408e-b28b-db2cd474622f-srv-cert\") pod \"catalog-operator-68c6474976-nm5x9\" (UID: \"4352f184-e8fe-408e-b28b-db2cd474622f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nm5x9" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.022740 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a4386205-1780-4ef2-a790-edab679ee8a7-metrics-tls\") pod \"ingress-operator-5b745b69d9-7v9hk\" (UID: 
\"a4386205-1780-4ef2-a790-edab679ee8a7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7v9hk" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.022761 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c6b48a4-1067-4a71-b66a-24a9677e6c37-metrics-certs\") pod \"router-default-5444994796-p5dgb\" (UID: \"6c6b48a4-1067-4a71-b66a-24a9677e6c37\") " pod="openshift-ingress/router-default-5444994796-p5dgb" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.023104 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1166298c-e29b-4c0a-b153-d92acf85b0d2-console-oauth-config\") pod \"console-f9d7485db-65vgm\" (UID: \"1166298c-e29b-4c0a-b153-d92acf85b0d2\") " pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.023160 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5be3fc9c-cfde-41ee-8383-02800d3990cd-serving-cert\") pod \"authentication-operator-69f744f599-srb5s\" (UID: \"5be3fc9c-cfde-41ee-8383-02800d3990cd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-srb5s" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.023182 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8tdc\" (UniqueName: \"kubernetes.io/projected/a4386205-1780-4ef2-a790-edab679ee8a7-kube-api-access-q8tdc\") pod \"ingress-operator-5b745b69d9-7v9hk\" (UID: \"a4386205-1780-4ef2-a790-edab679ee8a7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7v9hk" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.023213 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/15a12100-5704-4b1b-b9db-4961709b2587-bound-sa-token\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.023230 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/37e5c807-b97c-4317-83a8-8266feac9df8-certs\") pod \"machine-config-server-sgdgm\" (UID: \"37e5c807-b97c-4317-83a8-8266feac9df8\") " pod="openshift-machine-config-operator/machine-config-server-sgdgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.023246 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/6c6b48a4-1067-4a71-b66a-24a9677e6c37-default-certificate\") pod \"router-default-5444994796-p5dgb\" (UID: \"6c6b48a4-1067-4a71-b66a-24a9677e6c37\") " pod="openshift-ingress/router-default-5444994796-p5dgb" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.023269 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.023290 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/4352f184-e8fe-408e-b28b-db2cd474622f-profile-collector-cert\") pod \"catalog-operator-68c6474976-nm5x9\" (UID: \"4352f184-e8fe-408e-b28b-db2cd474622f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nm5x9" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.023346 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.023364 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/aa38141d-14db-436f-b9f7-9270a301ef1e-mountpoint-dir\") pod \"csi-hostpathplugin-ts9kq\" (UID: \"aa38141d-14db-436f-b9f7-9270a301ef1e\") " pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.023385 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.023414 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwbxd\" (UniqueName: \"kubernetes.io/projected/6c6b48a4-1067-4a71-b66a-24a9677e6c37-kube-api-access-zwbxd\") pod \"router-default-5444994796-p5dgb\" (UID: \"6c6b48a4-1067-4a71-b66a-24a9677e6c37\") " pod="openshift-ingress/router-default-5444994796-p5dgb" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.023430 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1166298c-e29b-4c0a-b153-d92acf85b0d2-oauth-serving-cert\") pod \"console-f9d7485db-65vgm\" (UID: \"1166298c-e29b-4c0a-b153-d92acf85b0d2\") " pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.023447 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/aa38141d-14db-436f-b9f7-9270a301ef1e-registration-dir\") pod \"csi-hostpathplugin-ts9kq\" (UID: \"aa38141d-14db-436f-b9f7-9270a301ef1e\") " pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.023479 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5-webhook-cert\") pod \"packageserver-d55dfcdfc-5jx5t\" (UID: \"68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5jx5t" Jan 21 17:58:38 crc 
kubenswrapper[4792]: I0121 17:58:38.023498 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bdbd468f-304e-429c-9957-e60c6e756db4-auth-proxy-config\") pod \"machine-config-operator-74547568cd-d869q\" (UID: \"bdbd468f-304e-429c-9957-e60c6e756db4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d869q" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.023516 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.023542 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/aa38141d-14db-436f-b9f7-9270a301ef1e-plugins-dir\") pod \"csi-hostpathplugin-ts9kq\" (UID: \"aa38141d-14db-436f-b9f7-9270a301ef1e\") " pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.024475 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/15a12100-5704-4b1b-b9db-4961709b2587-registry-tls\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.028369 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cc380d9c-2633-4904-8225-bd456fdb103b-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-7w8k4\" (UID: \"cc380d9c-2633-4904-8225-bd456fdb103b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7w8k4" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.028397 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mphf\" (UniqueName: \"kubernetes.io/projected/f8faafed-92b2-4ebc-9d37-9b7fc1d32067-kube-api-access-8mphf\") pod \"service-ca-operator-777779d784-qq7s6\" (UID: \"f8faafed-92b2-4ebc-9d37-9b7fc1d32067\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-qq7s6" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.028448 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/6c6b48a4-1067-4a71-b66a-24a9677e6c37-stats-auth\") pod \"router-default-5444994796-p5dgb\" (UID: \"6c6b48a4-1067-4a71-b66a-24a9677e6c37\") " pod="openshift-ingress/router-default-5444994796-p5dgb" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.028502 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 
crc kubenswrapper[4792]: I0121 17:58:38.028544 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/bdbd468f-304e-429c-9957-e60c6e756db4-images\") pod \"machine-config-operator-74547568cd-d869q\" (UID: \"bdbd468f-304e-429c-9957-e60c6e756db4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d869q" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.028565 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shh4k\" (UniqueName: \"kubernetes.io/projected/148627d0-5541-4e87-9b4c-cf22303d76b5-kube-api-access-shh4k\") pod \"ingress-canary-w784k\" (UID: \"148627d0-5541-4e87-9b4c-cf22303d76b5\") " pod="openshift-ingress-canary/ingress-canary-w784k" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.028610 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a4386205-1780-4ef2-a790-edab679ee8a7-trusted-ca\") pod \"ingress-operator-5b745b69d9-7v9hk\" (UID: \"a4386205-1780-4ef2-a790-edab679ee8a7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7v9hk" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.028629 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a4386205-1780-4ef2-a790-edab679ee8a7-bound-sa-token\") pod \"ingress-operator-5b745b69d9-7v9hk\" (UID: \"a4386205-1780-4ef2-a790-edab679ee8a7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7v9hk" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.028650 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6c6b48a4-1067-4a71-b66a-24a9677e6c37-service-ca-bundle\") pod \"router-default-5444994796-p5dgb\" (UID: \"6c6b48a4-1067-4a71-b66a-24a9677e6c37\") " pod="openshift-ingress/router-default-5444994796-p5dgb" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.033659 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/15a12100-5704-4b1b-b9db-4961709b2587-registry-certificates\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.051911 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5-webhook-cert\") pod \"packageserver-d55dfcdfc-5jx5t\" (UID: \"68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5jx5t" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.052303 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjr2g\" (UniqueName: \"kubernetes.io/projected/68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5-kube-api-access-bjr2g\") pod \"packageserver-d55dfcdfc-5jx5t\" (UID: \"68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5jx5t" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.060596 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/a4386205-1780-4ef2-a790-edab679ee8a7-metrics-tls\") pod \"ingress-operator-5b745b69d9-7v9hk\" (UID: \"a4386205-1780-4ef2-a790-edab679ee8a7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7v9hk" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.062417 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjrq5\" (UniqueName: \"kubernetes.io/projected/15a12100-5704-4b1b-b9db-4961709b2587-kube-api-access-xjrq5\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.062435 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kf97f\" (UniqueName: \"kubernetes.io/projected/3d05043f-32a0-4b55-ac55-3f48b3c25bc5-kube-api-access-kf97f\") pod \"multus-admission-controller-857f4d67dd-mrvfm\" (UID: \"3d05043f-32a0-4b55-ac55-3f48b3c25bc5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mrvfm" Jan 21 17:58:38 crc kubenswrapper[4792]: E0121 17:58:38.063701 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:38.563680493 +0000 UTC m=+152.545643679 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.065547 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a4386205-1780-4ef2-a790-edab679ee8a7-trusted-ca\") pod \"ingress-operator-5b745b69d9-7v9hk\" (UID: \"a4386205-1780-4ef2-a790-edab679ee8a7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7v9hk" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.075940 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/15a12100-5704-4b1b-b9db-4961709b2587-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.077377 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-rljlr" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.082518 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbmrt\" (UniqueName: \"kubernetes.io/projected/1001cdf3-8e91-4e17-a8f7-a92b91daf23e-kube-api-access-fbmrt\") pod \"downloads-7954f5f757-xpn4x\" (UID: \"1001cdf3-8e91-4e17-a8f7-a92b91daf23e\") " pod="openshift-console/downloads-7954f5f757-xpn4x" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.084995 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8tdc\" (UniqueName: \"kubernetes.io/projected/a4386205-1780-4ef2-a790-edab679ee8a7-kube-api-access-q8tdc\") pod \"ingress-operator-5b745b69d9-7v9hk\" (UID: \"a4386205-1780-4ef2-a790-edab679ee8a7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7v9hk" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.091357 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5-apiservice-cert\") pod \"packageserver-d55dfcdfc-5jx5t\" (UID: \"68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5jx5t" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.090090 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/3d05043f-32a0-4b55-ac55-3f48b3c25bc5-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-mrvfm\" (UID: \"3d05043f-32a0-4b55-ac55-3f48b3c25bc5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mrvfm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.105500 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-jt4rr" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.120326 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-zhchl" event={"ID":"cb575284-6b4b-49ab-b314-ba85d494ef6c","Type":"ContainerStarted","Data":"29b386fad6ca601992d08f5a37876437898aab4fd0546d2c6edaef660d37c1b9"} Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.121713 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-cvxjv" event={"ID":"387582b8-e641-4519-a20f-794c23707b51","Type":"ContainerStarted","Data":"392a464da52bc43364a63198cb248a50b468e48c421e202ffcff2bfc289b3b30"} Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.123638 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-xpn4x" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.134476 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/15a12100-5704-4b1b-b9db-4961709b2587-bound-sa-token\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.135403 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dvxl6" event={"ID":"36a8e839-34d3-48ad-908e-4bcc8c996cca","Type":"ContainerStarted","Data":"d7a0a81b7c1e8bcc2d3c1134c5bcbd432ff92714eedda50c4a9c482091a70fa5"} Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.197740 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:38 crc kubenswrapper[4792]: E0121 17:58:38.197900 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:38.697879321 +0000 UTC m=+152.679842507 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.197939 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/aa38141d-14db-436f-b9f7-9270a301ef1e-csi-data-dir\") pod \"csi-hostpathplugin-ts9kq\" (UID: \"aa38141d-14db-436f-b9f7-9270a301ef1e\") " pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.197974 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9zbr\" (UniqueName: \"kubernetes.io/projected/1166298c-e29b-4c0a-b153-d92acf85b0d2-kube-api-access-c9zbr\") pod \"console-f9d7485db-65vgm\" (UID: \"1166298c-e29b-4c0a-b153-d92acf85b0d2\") " pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.197993 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7nd6\" (UniqueName: \"kubernetes.io/projected/cc380d9c-2633-4904-8225-bd456fdb103b-kube-api-access-h7nd6\") pod \"kube-storage-version-migrator-operator-b67b599dd-7w8k4\" (UID: \"cc380d9c-2633-4904-8225-bd456fdb103b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7w8k4" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.198012 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/1166298c-e29b-4c0a-b153-d92acf85b0d2-trusted-ca-bundle\") pod \"console-f9d7485db-65vgm\" (UID: \"1166298c-e29b-4c0a-b153-d92acf85b0d2\") " pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.198030 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-audit-dir\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.198056 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2dc72b61-a3c4-4a8a-b969-94f9a9036946-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2zfjc\" (UID: \"2dc72b61-a3c4-4a8a-b969-94f9a9036946\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2zfjc" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.198074 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/aa38141d-14db-436f-b9f7-9270a301ef1e-socket-dir\") pod \"csi-hostpathplugin-ts9kq\" (UID: \"aa38141d-14db-436f-b9f7-9270a301ef1e\") " pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.198089 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-audit-policies\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.198106 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a15d3491-6301-49f2-a196-df5db956aa82-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bdt7b\" (UID: \"a15d3491-6301-49f2-a196-df5db956aa82\") " pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.198105 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/aa38141d-14db-436f-b9f7-9270a301ef1e-csi-data-dir\") pod \"csi-hostpathplugin-ts9kq\" (UID: \"aa38141d-14db-436f-b9f7-9270a301ef1e\") " pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.198123 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/37e5c807-b97c-4317-83a8-8266feac9df8-node-bootstrap-token\") pod \"machine-config-server-sgdgm\" (UID: \"37e5c807-b97c-4317-83a8-8266feac9df8\") " pod="openshift-machine-config-operator/machine-config-server-sgdgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.198138 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bdbd468f-304e-429c-9957-e60c6e756db4-proxy-tls\") pod \"machine-config-operator-74547568cd-d869q\" (UID: \"bdbd468f-304e-429c-9957-e60c6e756db4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d869q" Jan 21 17:58:38 crc 
kubenswrapper[4792]: I0121 17:58:38.198154 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-audit-dir\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.198157 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.203740 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-audit-policies\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.204240 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/aa38141d-14db-436f-b9f7-9270a301ef1e-socket-dir\") pod \"csi-hostpathplugin-ts9kq\" (UID: \"aa38141d-14db-436f-b9f7-9270a301ef1e\") " pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.206944 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5be3fc9c-cfde-41ee-8383-02800d3990cd-service-ca-bundle\") pod \"authentication-operator-69f744f599-srb5s\" (UID: \"5be3fc9c-cfde-41ee-8383-02800d3990cd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-srb5s" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.206995 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/c705c522-8d1a-4f0e-9d7a-671489591029-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-ft2zn\" (UID: \"c705c522-8d1a-4f0e-9d7a-671489591029\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ft2zn" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.207030 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/4352f184-e8fe-408e-b28b-db2cd474622f-srv-cert\") pod \"catalog-operator-68c6474976-nm5x9\" (UID: \"4352f184-e8fe-408e-b28b-db2cd474622f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nm5x9" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.207058 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.207086 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/1166298c-e29b-4c0a-b153-d92acf85b0d2-console-oauth-config\") pod \"console-f9d7485db-65vgm\" (UID: \"1166298c-e29b-4c0a-b153-d92acf85b0d2\") " pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.207116 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c6b48a4-1067-4a71-b66a-24a9677e6c37-metrics-certs\") pod \"router-default-5444994796-p5dgb\" (UID: \"6c6b48a4-1067-4a71-b66a-24a9677e6c37\") " pod="openshift-ingress/router-default-5444994796-p5dgb" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.207141 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5be3fc9c-cfde-41ee-8383-02800d3990cd-serving-cert\") pod \"authentication-operator-69f744f599-srb5s\" (UID: \"5be3fc9c-cfde-41ee-8383-02800d3990cd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-srb5s" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.207173 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/37e5c807-b97c-4317-83a8-8266feac9df8-certs\") pod \"machine-config-server-sgdgm\" (UID: \"37e5c807-b97c-4317-83a8-8266feac9df8\") " pod="openshift-machine-config-operator/machine-config-server-sgdgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.207198 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/6c6b48a4-1067-4a71-b66a-24a9677e6c37-default-certificate\") pod \"router-default-5444994796-p5dgb\" (UID: \"6c6b48a4-1067-4a71-b66a-24a9677e6c37\") " pod="openshift-ingress/router-default-5444994796-p5dgb" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.207225 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.207249 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/4352f184-e8fe-408e-b28b-db2cd474622f-profile-collector-cert\") pod \"catalog-operator-68c6474976-nm5x9\" (UID: \"4352f184-e8fe-408e-b28b-db2cd474622f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nm5x9" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.207311 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.207335 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/aa38141d-14db-436f-b9f7-9270a301ef1e-mountpoint-dir\") pod \"csi-hostpathplugin-ts9kq\" (UID: \"aa38141d-14db-436f-b9f7-9270a301ef1e\") " 
pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.207369 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.207390 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwbxd\" (UniqueName: \"kubernetes.io/projected/6c6b48a4-1067-4a71-b66a-24a9677e6c37-kube-api-access-zwbxd\") pod \"router-default-5444994796-p5dgb\" (UID: \"6c6b48a4-1067-4a71-b66a-24a9677e6c37\") " pod="openshift-ingress/router-default-5444994796-p5dgb" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.207411 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1166298c-e29b-4c0a-b153-d92acf85b0d2-oauth-serving-cert\") pod \"console-f9d7485db-65vgm\" (UID: \"1166298c-e29b-4c0a-b153-d92acf85b0d2\") " pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.207432 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/aa38141d-14db-436f-b9f7-9270a301ef1e-registration-dir\") pod \"csi-hostpathplugin-ts9kq\" (UID: \"aa38141d-14db-436f-b9f7-9270a301ef1e\") " pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.207462 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.207490 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bdbd468f-304e-429c-9957-e60c6e756db4-auth-proxy-config\") pod \"machine-config-operator-74547568cd-d869q\" (UID: \"bdbd468f-304e-429c-9957-e60c6e756db4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d869q" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.207516 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/aa38141d-14db-436f-b9f7-9270a301ef1e-plugins-dir\") pod \"csi-hostpathplugin-ts9kq\" (UID: \"aa38141d-14db-436f-b9f7-9270a301ef1e\") " pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.207541 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cc380d9c-2633-4904-8225-bd456fdb103b-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-7w8k4\" (UID: \"cc380d9c-2633-4904-8225-bd456fdb103b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7w8k4" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.207573 4792 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-8mphf\" (UniqueName: \"kubernetes.io/projected/f8faafed-92b2-4ebc-9d37-9b7fc1d32067-kube-api-access-8mphf\") pod \"service-ca-operator-777779d784-qq7s6\" (UID: \"f8faafed-92b2-4ebc-9d37-9b7fc1d32067\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-qq7s6" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.207607 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/6c6b48a4-1067-4a71-b66a-24a9677e6c37-stats-auth\") pod \"router-default-5444994796-p5dgb\" (UID: \"6c6b48a4-1067-4a71-b66a-24a9677e6c37\") " pod="openshift-ingress/router-default-5444994796-p5dgb" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.207635 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.208102 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/aa38141d-14db-436f-b9f7-9270a301ef1e-registration-dir\") pod \"csi-hostpathplugin-ts9kq\" (UID: \"aa38141d-14db-436f-b9f7-9270a301ef1e\") " pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" Jan 21 17:58:38 crc kubenswrapper[4792]: E0121 17:58:38.208583 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:38.708562597 +0000 UTC m=+152.690525783 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.209037 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/aa38141d-14db-436f-b9f7-9270a301ef1e-mountpoint-dir\") pod \"csi-hostpathplugin-ts9kq\" (UID: \"aa38141d-14db-436f-b9f7-9270a301ef1e\") " pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.209731 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/aa38141d-14db-436f-b9f7-9270a301ef1e-plugins-dir\") pod \"csi-hostpathplugin-ts9kq\" (UID: \"aa38141d-14db-436f-b9f7-9270a301ef1e\") " pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.210338 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bdbd468f-304e-429c-9957-e60c6e756db4-auth-proxy-config\") pod \"machine-config-operator-74547568cd-d869q\" (UID: \"bdbd468f-304e-429c-9957-e60c6e756db4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d869q" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.212622 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1166298c-e29b-4c0a-b153-d92acf85b0d2-oauth-serving-cert\") pod \"console-f9d7485db-65vgm\" (UID: \"1166298c-e29b-4c0a-b153-d92acf85b0d2\") " pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.213351 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219077 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/bdbd468f-304e-429c-9957-e60c6e756db4-images\") pod \"machine-config-operator-74547568cd-d869q\" (UID: \"bdbd468f-304e-429c-9957-e60c6e756db4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d869q" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219133 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shh4k\" (UniqueName: \"kubernetes.io/projected/148627d0-5541-4e87-9b4c-cf22303d76b5-kube-api-access-shh4k\") pod \"ingress-canary-w784k\" (UID: \"148627d0-5541-4e87-9b4c-cf22303d76b5\") " pod="openshift-ingress-canary/ingress-canary-w784k" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219169 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/6c6b48a4-1067-4a71-b66a-24a9677e6c37-service-ca-bundle\") pod \"router-default-5444994796-p5dgb\" (UID: \"6c6b48a4-1067-4a71-b66a-24a9677e6c37\") " pod="openshift-ingress/router-default-5444994796-p5dgb" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219218 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219245 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219273 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219296 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dcrj\" (UniqueName: \"kubernetes.io/projected/aa38141d-14db-436f-b9f7-9270a301ef1e-kube-api-access-7dcrj\") pod \"csi-hostpathplugin-ts9kq\" (UID: \"aa38141d-14db-436f-b9f7-9270a301ef1e\") " pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219319 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sggwj\" (UniqueName: \"kubernetes.io/projected/a0ea4efe-8d03-4090-b537-344474f2cdac-kube-api-access-sggwj\") pod \"dns-default-6nxvx\" (UID: \"a0ea4efe-8d03-4090-b537-344474f2cdac\") " pod="openshift-dns/dns-default-6nxvx" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219341 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/4a02b713-1a2c-43d9-9ed2-de57e40a2364-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-r2mgl\" (UID: \"4a02b713-1a2c-43d9-9ed2-de57e40a2364\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-r2mgl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219363 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5be3fc9c-cfde-41ee-8383-02800d3990cd-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-srb5s\" (UID: \"5be3fc9c-cfde-41ee-8383-02800d3990cd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-srb5s" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219381 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a0ea4efe-8d03-4090-b537-344474f2cdac-metrics-tls\") pod 
\"dns-default-6nxvx\" (UID: \"a0ea4efe-8d03-4090-b537-344474f2cdac\") " pod="openshift-dns/dns-default-6nxvx" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219401 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d14cc544-196a-41ec-aa6e-ffca799481e7-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-4nscl\" (UID: \"d14cc544-196a-41ec-aa6e-ffca799481e7\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4nscl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219428 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8faafed-92b2-4ebc-9d37-9b7fc1d32067-config\") pod \"service-ca-operator-777779d784-qq7s6\" (UID: \"f8faafed-92b2-4ebc-9d37-9b7fc1d32067\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-qq7s6" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219451 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1166298c-e29b-4c0a-b153-d92acf85b0d2-service-ca\") pod \"console-f9d7485db-65vgm\" (UID: \"1166298c-e29b-4c0a-b153-d92acf85b0d2\") " pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219485 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a15d3491-6301-49f2-a196-df5db956aa82-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bdt7b\" (UID: \"a15d3491-6301-49f2-a196-df5db956aa82\") " pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219506 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5be3fc9c-cfde-41ee-8383-02800d3990cd-config\") pod \"authentication-operator-69f744f599-srb5s\" (UID: \"5be3fc9c-cfde-41ee-8383-02800d3990cd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-srb5s" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219526 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dn9ql\" (UniqueName: \"kubernetes.io/projected/bdbd468f-304e-429c-9957-e60c6e756db4-kube-api-access-dn9ql\") pod \"machine-config-operator-74547568cd-d869q\" (UID: \"bdbd468f-304e-429c-9957-e60c6e756db4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d869q" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219543 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/148627d0-5541-4e87-9b4c-cf22303d76b5-cert\") pod \"ingress-canary-w784k\" (UID: \"148627d0-5541-4e87-9b4c-cf22303d76b5\") " pod="openshift-ingress-canary/ingress-canary-w784k" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219626 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219650 4792 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1166298c-e29b-4c0a-b153-d92acf85b0d2-console-config\") pod \"console-f9d7485db-65vgm\" (UID: \"1166298c-e29b-4c0a-b153-d92acf85b0d2\") " pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219669 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxczd\" (UniqueName: \"kubernetes.io/projected/5be3fc9c-cfde-41ee-8383-02800d3990cd-kube-api-access-sxczd\") pod \"authentication-operator-69f744f599-srb5s\" (UID: \"5be3fc9c-cfde-41ee-8383-02800d3990cd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-srb5s" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219687 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d14cc544-196a-41ec-aa6e-ffca799481e7-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-4nscl\" (UID: \"d14cc544-196a-41ec-aa6e-ffca799481e7\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4nscl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219727 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvfks\" (UniqueName: \"kubernetes.io/projected/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-kube-api-access-jvfks\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219758 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc380d9c-2633-4904-8225-bd456fdb103b-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-7w8k4\" (UID: \"cc380d9c-2633-4904-8225-bd456fdb103b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7w8k4" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219791 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/526cf46b-9c0e-4ea2-850f-e9fba77a9449-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-f659j\" (UID: \"526cf46b-9c0e-4ea2-850f-e9fba77a9449\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f659j" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219815 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a0ea4efe-8d03-4090-b537-344474f2cdac-config-volume\") pod \"dns-default-6nxvx\" (UID: \"a0ea4efe-8d03-4090-b537-344474f2cdac\") " pod="openshift-dns/dns-default-6nxvx" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219840 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n6w5\" (UniqueName: \"kubernetes.io/projected/d14cc544-196a-41ec-aa6e-ffca799481e7-kube-api-access-8n6w5\") pod \"cluster-image-registry-operator-dc59b4c8b-4nscl\" (UID: \"d14cc544-196a-41ec-aa6e-ffca799481e7\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4nscl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219889 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/2dc72b61-a3c4-4a8a-b969-94f9a9036946-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2zfjc\" (UID: \"2dc72b61-a3c4-4a8a-b969-94f9a9036946\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2zfjc" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219952 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnjgq\" (UniqueName: \"kubernetes.io/projected/37e5c807-b97c-4317-83a8-8266feac9df8-kube-api-access-jnjgq\") pod \"machine-config-server-sgdgm\" (UID: \"37e5c807-b97c-4317-83a8-8266feac9df8\") " pod="openshift-machine-config-operator/machine-config-server-sgdgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.220728 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.221275 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8faafed-92b2-4ebc-9d37-9b7fc1d32067-config\") pod \"service-ca-operator-777779d784-qq7s6\" (UID: \"f8faafed-92b2-4ebc-9d37-9b7fc1d32067\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-qq7s6" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.221471 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/bdbd468f-304e-429c-9957-e60c6e756db4-images\") pod \"machine-config-operator-74547568cd-d869q\" (UID: \"bdbd468f-304e-429c-9957-e60c6e756db4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d869q" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.222402 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6c6b48a4-1067-4a71-b66a-24a9677e6c37-service-ca-bundle\") pod \"router-default-5444994796-p5dgb\" (UID: \"6c6b48a4-1067-4a71-b66a-24a9677e6c37\") " pod="openshift-ingress/router-default-5444994796-p5dgb" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.219977 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wv447\" (UniqueName: \"kubernetes.io/projected/4a02b713-1a2c-43d9-9ed2-de57e40a2364-kube-api-access-wv447\") pod \"control-plane-machine-set-operator-78cbb6b69f-r2mgl\" (UID: \"4a02b713-1a2c-43d9-9ed2-de57e40a2364\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-r2mgl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.222786 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjmg9\" (UniqueName: \"kubernetes.io/projected/72615ab0-4d57-4849-89f9-3f660e97825b-kube-api-access-bjmg9\") pod \"migrator-59844c95c7-p8ffl\" (UID: \"72615ab0-4d57-4849-89f9-3f660e97825b\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-p8ffl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.222826 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dc72b61-a3c4-4a8a-b969-94f9a9036946-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2zfjc\" (UID: 
\"2dc72b61-a3c4-4a8a-b969-94f9a9036946\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2zfjc" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.222865 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gsxl\" (UniqueName: \"kubernetes.io/projected/c705c522-8d1a-4f0e-9d7a-671489591029-kube-api-access-4gsxl\") pod \"package-server-manager-789f6589d5-ft2zn\" (UID: \"c705c522-8d1a-4f0e-9d7a-671489591029\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ft2zn" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.222882 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hbkp\" (UniqueName: \"kubernetes.io/projected/a15d3491-6301-49f2-a196-df5db956aa82-kube-api-access-5hbkp\") pod \"marketplace-operator-79b997595-bdt7b\" (UID: \"a15d3491-6301-49f2-a196-df5db956aa82\") " pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.222907 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8faafed-92b2-4ebc-9d37-9b7fc1d32067-serving-cert\") pod \"service-ca-operator-777779d784-qq7s6\" (UID: \"f8faafed-92b2-4ebc-9d37-9b7fc1d32067\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-qq7s6" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.222925 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42lfw\" (UniqueName: \"kubernetes.io/projected/526cf46b-9c0e-4ea2-850f-e9fba77a9449-kube-api-access-42lfw\") pod \"cluster-samples-operator-665b6dd947-f659j\" (UID: \"526cf46b-9c0e-4ea2-850f-e9fba77a9449\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f659j" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.222945 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bb4w8\" (UniqueName: \"kubernetes.io/projected/4352f184-e8fe-408e-b28b-db2cd474622f-kube-api-access-bb4w8\") pod \"catalog-operator-68c6474976-nm5x9\" (UID: \"4352f184-e8fe-408e-b28b-db2cd474622f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nm5x9" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.222967 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/d14cc544-196a-41ec-aa6e-ffca799481e7-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-4nscl\" (UID: \"d14cc544-196a-41ec-aa6e-ffca799481e7\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4nscl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.222984 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.223002 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1166298c-e29b-4c0a-b153-d92acf85b0d2-console-serving-cert\") pod \"console-f9d7485db-65vgm\" (UID: 
\"1166298c-e29b-4c0a-b153-d92acf85b0d2\") " pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.223060 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c6b48a4-1067-4a71-b66a-24a9677e6c37-metrics-certs\") pod \"router-default-5444994796-p5dgb\" (UID: \"6c6b48a4-1067-4a71-b66a-24a9677e6c37\") " pod="openshift-ingress/router-default-5444994796-p5dgb" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.223951 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a0ea4efe-8d03-4090-b537-344474f2cdac-config-volume\") pod \"dns-default-6nxvx\" (UID: \"a0ea4efe-8d03-4090-b537-344474f2cdac\") " pod="openshift-dns/dns-default-6nxvx" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.231259 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5be3fc9c-cfde-41ee-8383-02800d3990cd-service-ca-bundle\") pod \"authentication-operator-69f744f599-srb5s\" (UID: \"5be3fc9c-cfde-41ee-8383-02800d3990cd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-srb5s" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.231428 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.231817 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5be3fc9c-cfde-41ee-8383-02800d3990cd-serving-cert\") pod \"authentication-operator-69f744f599-srb5s\" (UID: \"5be3fc9c-cfde-41ee-8383-02800d3990cd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-srb5s" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.234068 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5be3fc9c-cfde-41ee-8383-02800d3990cd-config\") pod \"authentication-operator-69f744f599-srb5s\" (UID: \"5be3fc9c-cfde-41ee-8383-02800d3990cd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-srb5s" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.235313 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dc72b61-a3c4-4a8a-b969-94f9a9036946-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2zfjc\" (UID: \"2dc72b61-a3c4-4a8a-b969-94f9a9036946\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2zfjc" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.236138 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5be3fc9c-cfde-41ee-8383-02800d3990cd-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-srb5s\" (UID: \"5be3fc9c-cfde-41ee-8383-02800d3990cd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-srb5s" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.242343 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-kzpjg" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.242484 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a15d3491-6301-49f2-a196-df5db956aa82-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bdt7b\" (UID: \"a15d3491-6301-49f2-a196-df5db956aa82\") " pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.242681 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc380d9c-2633-4904-8225-bd456fdb103b-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-7w8k4\" (UID: \"cc380d9c-2633-4904-8225-bd456fdb103b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7w8k4" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.250365 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a15d3491-6301-49f2-a196-df5db956aa82-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bdt7b\" (UID: \"a15d3491-6301-49f2-a196-df5db956aa82\") " pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.254421 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.261927 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.278814 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.278952 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.280188 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/148627d0-5541-4e87-9b4c-cf22303d76b5-cert\") pod \"ingress-canary-w784k\" (UID: \"148627d0-5541-4e87-9b4c-cf22303d76b5\") " pod="openshift-ingress-canary/ingress-canary-w784k" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.280706 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8faafed-92b2-4ebc-9d37-9b7fc1d32067-serving-cert\") pod 
\"service-ca-operator-777779d784-qq7s6\" (UID: \"f8faafed-92b2-4ebc-9d37-9b7fc1d32067\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-qq7s6" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.281972 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.282869 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.283431 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/d14cc544-196a-41ec-aa6e-ffca799481e7-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-4nscl\" (UID: \"d14cc544-196a-41ec-aa6e-ffca799481e7\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4nscl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.283544 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/526cf46b-9c0e-4ea2-850f-e9fba77a9449-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-f659j\" (UID: \"526cf46b-9c0e-4ea2-850f-e9fba77a9449\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f659j" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.283988 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/6c6b48a4-1067-4a71-b66a-24a9677e6c37-stats-auth\") pod \"router-default-5444994796-p5dgb\" (UID: \"6c6b48a4-1067-4a71-b66a-24a9677e6c37\") " pod="openshift-ingress/router-default-5444994796-p5dgb" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.284076 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2dc72b61-a3c4-4a8a-b969-94f9a9036946-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2zfjc\" (UID: \"2dc72b61-a3c4-4a8a-b969-94f9a9036946\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2zfjc" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.284276 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.294371 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bdbd468f-304e-429c-9957-e60c6e756db4-proxy-tls\") pod \"machine-config-operator-74547568cd-d869q\" (UID: \"bdbd468f-304e-429c-9957-e60c6e756db4\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d869q" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.297276 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/c705c522-8d1a-4f0e-9d7a-671489591029-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-ft2zn\" (UID: \"c705c522-8d1a-4f0e-9d7a-671489591029\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ft2zn" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.306002 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.310316 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9zbr\" (UniqueName: \"kubernetes.io/projected/1166298c-e29b-4c0a-b153-d92acf85b0d2-kube-api-access-c9zbr\") pod \"console-f9d7485db-65vgm\" (UID: \"1166298c-e29b-4c0a-b153-d92acf85b0d2\") " pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.311260 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a0ea4efe-8d03-4090-b537-344474f2cdac-metrics-tls\") pod \"dns-default-6nxvx\" (UID: \"a0ea4efe-8d03-4090-b537-344474f2cdac\") " pod="openshift-dns/dns-default-6nxvx" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.312537 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwbxd\" (UniqueName: \"kubernetes.io/projected/6c6b48a4-1067-4a71-b66a-24a9677e6c37-kube-api-access-zwbxd\") pod \"router-default-5444994796-p5dgb\" (UID: \"6c6b48a4-1067-4a71-b66a-24a9677e6c37\") " pod="openshift-ingress/router-default-5444994796-p5dgb" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.312886 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.315696 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/4a02b713-1a2c-43d9-9ed2-de57e40a2364-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-r2mgl\" (UID: \"4a02b713-1a2c-43d9-9ed2-de57e40a2364\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-r2mgl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.318729 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/6c6b48a4-1067-4a71-b66a-24a9677e6c37-default-certificate\") pod \"router-default-5444994796-p5dgb\" (UID: \"6c6b48a4-1067-4a71-b66a-24a9677e6c37\") " pod="openshift-ingress/router-default-5444994796-p5dgb" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.321574 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2dc72b61-a3c4-4a8a-b969-94f9a9036946-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2zfjc\" (UID: \"2dc72b61-a3c4-4a8a-b969-94f9a9036946\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2zfjc" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.324682 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.327080 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mphf\" (UniqueName: \"kubernetes.io/projected/f8faafed-92b2-4ebc-9d37-9b7fc1d32067-kube-api-access-8mphf\") pod \"service-ca-operator-777779d784-qq7s6\" (UID: \"f8faafed-92b2-4ebc-9d37-9b7fc1d32067\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-qq7s6" Jan 21 17:58:38 crc kubenswrapper[4792]: E0121 17:58:38.327170 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:38.827151987 +0000 UTC m=+152.809115173 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.332118 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dz4cq"] Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.354577 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-mrvfm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.356524 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5jx5t" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.360126 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.360425 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/37e5c807-b97c-4317-83a8-8266feac9df8-node-bootstrap-token\") pod \"machine-config-server-sgdgm\" (UID: \"37e5c807-b97c-4317-83a8-8266feac9df8\") " pod="openshift-machine-config-operator/machine-config-server-sgdgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.361028 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d14cc544-196a-41ec-aa6e-ffca799481e7-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-4nscl\" (UID: \"d14cc544-196a-41ec-aa6e-ffca799481e7\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4nscl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.361166 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/37e5c807-b97c-4317-83a8-8266feac9df8-certs\") pod \"machine-config-server-sgdgm\" (UID: \"37e5c807-b97c-4317-83a8-8266feac9df8\") " pod="openshift-machine-config-operator/machine-config-server-sgdgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.361609 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/4352f184-e8fe-408e-b28b-db2cd474622f-srv-cert\") pod \"catalog-operator-68c6474976-nm5x9\" (UID: \"4352f184-e8fe-408e-b28b-db2cd474622f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nm5x9" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.362220 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/4352f184-e8fe-408e-b28b-db2cd474622f-profile-collector-cert\") pod \"catalog-operator-68c6474976-nm5x9\" (UID: \"4352f184-e8fe-408e-b28b-db2cd474622f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nm5x9" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.364393 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a4386205-1780-4ef2-a790-edab679ee8a7-bound-sa-token\") pod \"ingress-operator-5b745b69d9-7v9hk\" (UID: \"a4386205-1780-4ef2-a790-edab679ee8a7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7v9hk" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.364461 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1166298c-e29b-4c0a-b153-d92acf85b0d2-trusted-ca-bundle\") pod \"console-f9d7485db-65vgm\" (UID: \"1166298c-e29b-4c0a-b153-d92acf85b0d2\") " pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.365257 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dcrj\" (UniqueName: 
\"kubernetes.io/projected/aa38141d-14db-436f-b9f7-9270a301ef1e-kube-api-access-7dcrj\") pod \"csi-hostpathplugin-ts9kq\" (UID: \"aa38141d-14db-436f-b9f7-9270a301ef1e\") " pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.365676 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1166298c-e29b-4c0a-b153-d92acf85b0d2-service-ca\") pod \"console-f9d7485db-65vgm\" (UID: \"1166298c-e29b-4c0a-b153-d92acf85b0d2\") " pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.373194 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cc380d9c-2633-4904-8225-bd456fdb103b-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-7w8k4\" (UID: \"cc380d9c-2633-4904-8225-bd456fdb103b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7w8k4" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.373565 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1166298c-e29b-4c0a-b153-d92acf85b0d2-console-config\") pod \"console-f9d7485db-65vgm\" (UID: \"1166298c-e29b-4c0a-b153-d92acf85b0d2\") " pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.373922 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1166298c-e29b-4c0a-b153-d92acf85b0d2-console-oauth-config\") pod \"console-f9d7485db-65vgm\" (UID: \"1166298c-e29b-4c0a-b153-d92acf85b0d2\") " pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.377458 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1166298c-e29b-4c0a-b153-d92acf85b0d2-console-serving-cert\") pod \"console-f9d7485db-65vgm\" (UID: \"1166298c-e29b-4c0a-b153-d92acf85b0d2\") " pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.387835 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shh4k\" (UniqueName: \"kubernetes.io/projected/148627d0-5541-4e87-9b4c-cf22303d76b5-kube-api-access-shh4k\") pod \"ingress-canary-w784k\" (UID: \"148627d0-5541-4e87-9b4c-cf22303d76b5\") " pod="openshift-ingress-canary/ingress-canary-w784k" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.388369 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7nd6\" (UniqueName: \"kubernetes.io/projected/cc380d9c-2633-4904-8225-bd456fdb103b-kube-api-access-h7nd6\") pod \"kube-storage-version-migrator-operator-b67b599dd-7w8k4\" (UID: \"cc380d9c-2633-4904-8225-bd456fdb103b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7w8k4" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.390176 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sggwj\" (UniqueName: \"kubernetes.io/projected/a0ea4efe-8d03-4090-b537-344474f2cdac-kube-api-access-sggwj\") pod \"dns-default-6nxvx\" (UID: \"a0ea4efe-8d03-4090-b537-344474f2cdac\") " pod="openshift-dns/dns-default-6nxvx" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.405362 4792 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n6w5\" (UniqueName: \"kubernetes.io/projected/d14cc544-196a-41ec-aa6e-ffca799481e7-kube-api-access-8n6w5\") pod \"cluster-image-registry-operator-dc59b4c8b-4nscl\" (UID: \"d14cc544-196a-41ec-aa6e-ffca799481e7\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4nscl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.435415 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:38 crc kubenswrapper[4792]: E0121 17:58:38.435877 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:38.935823859 +0000 UTC m=+152.917787045 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.453246 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7w8k4" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.457181 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42lfw\" (UniqueName: \"kubernetes.io/projected/526cf46b-9c0e-4ea2-850f-e9fba77a9449-kube-api-access-42lfw\") pod \"cluster-samples-operator-665b6dd947-f659j\" (UID: \"526cf46b-9c0e-4ea2-850f-e9fba77a9449\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f659j" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.457379 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f659j" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.471008 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wv447\" (UniqueName: \"kubernetes.io/projected/4a02b713-1a2c-43d9-9ed2-de57e40a2364-kube-api-access-wv447\") pod \"control-plane-machine-set-operator-78cbb6b69f-r2mgl\" (UID: \"4a02b713-1a2c-43d9-9ed2-de57e40a2364\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-r2mgl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.473908 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.524182 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-p5dgb" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.524807 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-r2mgl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.525069 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-qq7s6" Jan 21 17:58:38 crc kubenswrapper[4792]: W0121 17:58:38.528537 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3afd98a4_e47a_4e22_ab23_0cbf1bf56e46.slice/crio-87b9569dfdf626a2abeb1a0ee7585c2a639feb30c0d1ba36e7f194b8ab04aa8b WatchSource:0}: Error finding container 87b9569dfdf626a2abeb1a0ee7585c2a639feb30c0d1ba36e7f194b8ab04aa8b: Status 404 returned error can't find the container with id 87b9569dfdf626a2abeb1a0ee7585c2a639feb30c0d1ba36e7f194b8ab04aa8b Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.529466 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bb4w8\" (UniqueName: \"kubernetes.io/projected/4352f184-e8fe-408e-b28b-db2cd474622f-kube-api-access-bb4w8\") pod \"catalog-operator-68c6474976-nm5x9\" (UID: \"4352f184-e8fe-408e-b28b-db2cd474622f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nm5x9" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.534482 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjmg9\" (UniqueName: \"kubernetes.io/projected/72615ab0-4d57-4849-89f9-3f660e97825b-kube-api-access-bjmg9\") pod \"migrator-59844c95c7-p8ffl\" (UID: \"72615ab0-4d57-4849-89f9-3f660e97825b\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-p8ffl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.547696 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:38 crc kubenswrapper[4792]: E0121 17:58:38.547843 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:39.047817184 +0000 UTC m=+153.029780370 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.548015 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:38 crc kubenswrapper[4792]: E0121 17:58:38.548316 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:39.048304077 +0000 UTC m=+153.030267253 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.553039 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2zfjc" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.556107 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hbkp\" (UniqueName: \"kubernetes.io/projected/a15d3491-6301-49f2-a196-df5db956aa82-kube-api-access-5hbkp\") pod \"marketplace-operator-79b997595-bdt7b\" (UID: \"a15d3491-6301-49f2-a196-df5db956aa82\") " pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.561298 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-6nxvx" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.561711 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5"] Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.569465 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d14cc544-196a-41ec-aa6e-ffca799481e7-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-4nscl\" (UID: \"d14cc544-196a-41ec-aa6e-ffca799481e7\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4nscl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.582982 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.592444 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-w784k" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.599611 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gsxl\" (UniqueName: \"kubernetes.io/projected/c705c522-8d1a-4f0e-9d7a-671489591029-kube-api-access-4gsxl\") pod \"package-server-manager-789f6589d5-ft2zn\" (UID: \"c705c522-8d1a-4f0e-9d7a-671489591029\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ft2zn" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.603606 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dn9ql\" (UniqueName: \"kubernetes.io/projected/bdbd468f-304e-429c-9957-e60c6e756db4-kube-api-access-dn9ql\") pod \"machine-config-operator-74547568cd-d869q\" (UID: \"bdbd468f-304e-429c-9957-e60c6e756db4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d869q" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.604582 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxczd\" (UniqueName: \"kubernetes.io/projected/5be3fc9c-cfde-41ee-8383-02800d3990cd-kube-api-access-sxczd\") pod \"authentication-operator-69f744f599-srb5s\" (UID: \"5be3fc9c-cfde-41ee-8383-02800d3990cd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-srb5s" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.610333 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7v9hk" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.616044 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvfks\" (UniqueName: \"kubernetes.io/projected/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-kube-api-access-jvfks\") pod \"oauth-openshift-558db77b4-bxqhq\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.632937 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnjgq\" (UniqueName: \"kubernetes.io/projected/37e5c807-b97c-4317-83a8-8266feac9df8-kube-api-access-jnjgq\") pod \"machine-config-server-sgdgm\" (UID: \"37e5c807-b97c-4317-83a8-8266feac9df8\") " pod="openshift-machine-config-operator/machine-config-server-sgdgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.649679 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:38 crc kubenswrapper[4792]: E0121 17:58:38.649863 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:39.149824961 +0000 UTC m=+153.131788147 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.650902 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:38 crc kubenswrapper[4792]: E0121 17:58:38.651253 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:39.151239677 +0000 UTC m=+153.133202853 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.732265 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4nscl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.737745 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d869q" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.742666 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-srb5s" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.752062 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:38 crc kubenswrapper[4792]: E0121 17:58:38.752207 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:39.252181307 +0000 UTC m=+153.234144493 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.752568 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:38 crc kubenswrapper[4792]: E0121 17:58:38.752999 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:39.252990937 +0000 UTC m=+153.234954123 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.768641 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-p8ffl" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.798502 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.814473 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nm5x9" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.835412 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ft2zn" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.842530 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" Jan 21 17:58:38 crc kubenswrapper[4792]: W0121 17:58:38.844469 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod39822312_6707_4de1_8cc6_5ab1f513ebf7.slice/crio-3a7156982ed014c57ac0fb01f847336ab3a2c69d4e064ff270c19db15b7fb1b2 WatchSource:0}: Error finding container 3a7156982ed014c57ac0fb01f847336ab3a2c69d4e064ff270c19db15b7fb1b2: Status 404 returned error can't find the container with id 3a7156982ed014c57ac0fb01f847336ab3a2c69d4e064ff270c19db15b7fb1b2 Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.853449 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:38 crc kubenswrapper[4792]: E0121 17:58:38.853631 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:39.35360091 +0000 UTC m=+153.335564096 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.853698 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:38 crc kubenswrapper[4792]: E0121 17:58:38.854328 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:39.354246085 +0000 UTC m=+153.336209271 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.900243 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-sgdgm" Jan 21 17:58:38 crc kubenswrapper[4792]: I0121 17:58:38.976413 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:38 crc kubenswrapper[4792]: E0121 17:58:38.977020 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:39.477003439 +0000 UTC m=+153.458966625 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:39 crc kubenswrapper[4792]: I0121 17:58:39.081633 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:39 crc kubenswrapper[4792]: E0121 17:58:39.088388 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:39.588366968 +0000 UTC m=+153.570330154 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:39 crc kubenswrapper[4792]: I0121 17:58:39.127213 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-w9bvf"] Jan 21 17:58:39 crc kubenswrapper[4792]: I0121 17:58:39.136254 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-lctf8"] Jan 21 17:58:39 crc kubenswrapper[4792]: I0121 17:58:39.139865 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-p5dgb" event={"ID":"6c6b48a4-1067-4a71-b66a-24a9677e6c37","Type":"ContainerStarted","Data":"e8010833702b06daaa2b69bf953562881cf91fefdef4eeccedb655003529f4b2"} Jan 21 17:58:39 crc kubenswrapper[4792]: I0121 17:58:39.141084 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dvxl6" event={"ID":"36a8e839-34d3-48ad-908e-4bcc8c996cca","Type":"ContainerStarted","Data":"cbdfea3693802c8989dc9278bb13dd1fe8464e9e331532619bbd5e54953b0b94"} Jan 21 17:58:39 crc kubenswrapper[4792]: I0121 17:58:39.150169 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" event={"ID":"39822312-6707-4de1-8cc6-5ab1f513ebf7","Type":"ContainerStarted","Data":"3a7156982ed014c57ac0fb01f847336ab3a2c69d4e064ff270c19db15b7fb1b2"} Jan 21 17:58:39 crc kubenswrapper[4792]: I0121 17:58:39.156310 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" event={"ID":"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46","Type":"ContainerStarted","Data":"87b9569dfdf626a2abeb1a0ee7585c2a639feb30c0d1ba36e7f194b8ab04aa8b"} Jan 21 17:58:39 crc kubenswrapper[4792]: I0121 17:58:39.159005 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-zhchl" event={"ID":"cb575284-6b4b-49ab-b314-ba85d494ef6c","Type":"ContainerStarted","Data":"528887c91fbd2eb3281851e9c4b9597ad47eb66f26ff491d322366bdc0e77b8e"} Jan 21 17:58:39 crc kubenswrapper[4792]: I0121 17:58:39.183525 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:39 crc kubenswrapper[4792]: E0121 17:58:39.183706 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:39.683681808 +0000 UTC m=+153.665645004 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:39 crc kubenswrapper[4792]: I0121 17:58:39.183834 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:39 crc kubenswrapper[4792]: E0121 17:58:39.184146 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:39.68413832 +0000 UTC m=+153.666101506 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:39 crc kubenswrapper[4792]: I0121 17:58:39.285439 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:39 crc kubenswrapper[4792]: E0121 17:58:39.285885 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:39.785834359 +0000 UTC m=+153.767797545 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:39 crc kubenswrapper[4792]: I0121 17:58:39.387305 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:39 crc kubenswrapper[4792]: E0121 17:58:39.387684 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:39.887670972 +0000 UTC m=+153.869634148 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:39 crc kubenswrapper[4792]: I0121 17:58:39.488665 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:39 crc kubenswrapper[4792]: E0121 17:58:39.488825 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:39.988804787 +0000 UTC m=+153.970767973 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:39 crc kubenswrapper[4792]: I0121 17:58:39.489321 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:39 crc kubenswrapper[4792]: E0121 17:58:39.489612 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:39.989604128 +0000 UTC m=+153.971567314 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:39 crc kubenswrapper[4792]: I0121 17:58:39.610240 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:39 crc kubenswrapper[4792]: E0121 17:58:39.610591 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:40.110566116 +0000 UTC m=+154.092529302 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:39 crc kubenswrapper[4792]: I0121 17:58:39.711780 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:39 crc kubenswrapper[4792]: E0121 17:58:39.712349 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:40.212318857 +0000 UTC m=+154.194282043 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:39 crc kubenswrapper[4792]: I0121 17:58:39.813420 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:39 crc kubenswrapper[4792]: E0121 17:58:39.813905 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:40.313889613 +0000 UTC m=+154.295852799 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:39 crc kubenswrapper[4792]: I0121 17:58:39.915964 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:39 crc kubenswrapper[4792]: E0121 17:58:39.916412 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:40.416386402 +0000 UTC m=+154.398349588 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:40 crc kubenswrapper[4792]: I0121 17:58:40.017051 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:40 crc kubenswrapper[4792]: E0121 17:58:40.017685 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:40.517660511 +0000 UTC m=+154.499623697 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:40 crc kubenswrapper[4792]: I0121 17:58:40.124624 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:40 crc kubenswrapper[4792]: E0121 17:58:40.124959 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:40.624945619 +0000 UTC m=+154.606908805 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:40 crc kubenswrapper[4792]: I0121 17:58:40.246982 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:40 crc kubenswrapper[4792]: E0121 17:58:40.247110 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:40.747093647 +0000 UTC m=+154.729056823 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:40 crc kubenswrapper[4792]: I0121 17:58:40.247363 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:40 crc kubenswrapper[4792]: E0121 17:58:40.247765 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:40.747752063 +0000 UTC m=+154.729715249 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:40 crc kubenswrapper[4792]: I0121 17:58:40.349297 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:40 crc kubenswrapper[4792]: E0121 17:58:40.349781 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:40.84975986 +0000 UTC m=+154.831723046 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:40 crc kubenswrapper[4792]: I0121 17:58:40.452365 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:40 crc kubenswrapper[4792]: E0121 17:58:40.452807 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:40.952794493 +0000 UTC m=+154.934757679 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:40 crc kubenswrapper[4792]: I0121 17:58:40.554107 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:40 crc kubenswrapper[4792]: E0121 17:58:40.554512 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:41.054495802 +0000 UTC m=+155.036458988 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:40 crc kubenswrapper[4792]: I0121 17:58:40.656232 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:40 crc kubenswrapper[4792]: E0121 17:58:40.656667 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:41.156648633 +0000 UTC m=+155.138611819 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:40 crc kubenswrapper[4792]: I0121 17:58:40.757207 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:40 crc kubenswrapper[4792]: E0121 17:58:40.757458 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:41.257410648 +0000 UTC m=+155.239373834 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:40 crc kubenswrapper[4792]: I0121 17:58:40.761072 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:40 crc kubenswrapper[4792]: E0121 17:58:40.761802 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:41.261781927 +0000 UTC m=+155.243745113 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:40 crc kubenswrapper[4792]: I0121 17:58:40.863705 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:40 crc kubenswrapper[4792]: E0121 17:58:40.864142 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:41.364114603 +0000 UTC m=+155.346077789 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:40 crc kubenswrapper[4792]: I0121 17:58:40.967688 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:40 crc kubenswrapper[4792]: E0121 17:58:40.968073 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:41.468057498 +0000 UTC m=+155.450020684 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.070129 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:41 crc kubenswrapper[4792]: E0121 17:58:41.070380 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:41.570334852 +0000 UTC m=+155.552298048 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.070560 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:41 crc kubenswrapper[4792]: E0121 17:58:41.070946 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:41.570929786 +0000 UTC m=+155.552892972 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.179231 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:41 crc kubenswrapper[4792]: E0121 17:58:41.179363 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:41.679343763 +0000 UTC m=+155.661306949 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.179610 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:41 crc kubenswrapper[4792]: E0121 17:58:41.179996 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:41.679984438 +0000 UTC m=+155.661947624 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.281135 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:41 crc kubenswrapper[4792]: E0121 17:58:41.281677 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:41.781659568 +0000 UTC m=+155.763622754 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.324032 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lctf8" event={"ID":"940eecee-282d-4cf5-b296-0a9b63ea3d95","Type":"ContainerStarted","Data":"2b819365eeff44faf2ceaddb2542d75e7472064cb39d63fc11dfc6ba7b7cb5d6"}
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.326638 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-p5dgb" event={"ID":"6c6b48a4-1067-4a71-b66a-24a9677e6c37","Type":"ContainerStarted","Data":"01ce236e48be772398ba17a28ba37ca64f583270f9e4ce7c06acf4908a35bd7b"}
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.328720 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-sgdgm" event={"ID":"37e5c807-b97c-4317-83a8-8266feac9df8","Type":"ContainerStarted","Data":"7ae6ffd5450b55bb0375d77df446d474fbd15fbb25ae536e4f5b966910d6916b"}
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.345124 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-w9bvf" event={"ID":"209b3283-9e92-472c-bc99-10d9d56788b2","Type":"ContainerStarted","Data":"15d0de0a423e2ca184a273c6e45d985f656108c3d5a8b39422ee5c3c23265dbf"}
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.347532 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" event={"ID":"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46","Type":"ContainerStarted","Data":"553770c21b346fdab8c4b42c921e8e972ffbf6cf9549dcfb66f2d2d4ad885615"}
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.349686 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq"
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.352759 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-cvxjv" event={"ID":"387582b8-e641-4519-a20f-794c23707b51","Type":"ContainerStarted","Data":"a0945723b6c0afe9b4ebfbe023f4c4a3a9099f8bb25e2feda87fc6a5acc6e791"}
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.354074 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dvxl6" event={"ID":"36a8e839-34d3-48ad-908e-4bcc8c996cca","Type":"ContainerStarted","Data":"50c5e39fdf793557878d9d42b3ecf13cb7677cad4176bde1b1113f8b96d5c83f"}
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.355872 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" event={"ID":"39822312-6707-4de1-8cc6-5ab1f513ebf7","Type":"ContainerStarted","Data":"71dfc512f4e5d44369584af3406534ddf824d7bab621f74ebd63289d0b38dd8c"}
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.356121 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5"
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.377450 4792 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-42ml5 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body=
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.377524 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" podUID="39822312-6707-4de1-8cc6-5ab1f513ebf7" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused"
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.377895 4792 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-dz4cq container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body=
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.377917 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" podUID="3afd98a4-e47a-4e22-ab23-0cbf1bf56e46" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused"
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.380105 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-p5dgb" podStartSLOduration=134.380086095 podStartE2EDuration="2m14.380086095s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:41.37824362 +0000 UTC m=+155.360206806" watchObservedRunningTime="2026-01-21 17:58:41.380086095 +0000 UTC m=+155.362049291"
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.382661 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:41 crc kubenswrapper[4792]: E0121 17:58:41.385492 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:41.8854773 +0000 UTC m=+155.867440486 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.412810 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" podStartSLOduration=134.412789458 podStartE2EDuration="2m14.412789458s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:41.412340817 +0000 UTC m=+155.394304013" watchObservedRunningTime="2026-01-21 17:58:41.412789458 +0000 UTC m=+155.394752644"
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.484156 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:41 crc kubenswrapper[4792]: E0121 17:58:41.484343 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:41.984307758 +0000 UTC m=+155.966270944 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.484529 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:41 crc kubenswrapper[4792]: E0121 17:58:41.485042 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:41.985020065 +0000 UTC m=+155.966983251 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.526469 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-p5dgb"
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.526905 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body=
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.526944 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused"
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.587206 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:41 crc kubenswrapper[4792]: E0121 17:58:41.587630 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:42.087602626 +0000 UTC m=+156.069565812 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.597817 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" podStartSLOduration=134.597795581 podStartE2EDuration="2m14.597795581s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:41.510268513 +0000 UTC m=+155.492231709" watchObservedRunningTime="2026-01-21 17:58:41.597795581 +0000 UTC m=+155.579758767"
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.597966 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dvxl6" podStartSLOduration=135.597961395 podStartE2EDuration="2m15.597961395s" podCreationTimestamp="2026-01-21 17:56:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:41.594824476 +0000 UTC m=+155.576787682" watchObservedRunningTime="2026-01-21 17:58:41.597961395 +0000 UTC m=+155.579924601"
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.699774 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:41 crc kubenswrapper[4792]: E0121 17:58:41.700120 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:42.200108475 +0000 UTC m=+156.182071661 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.800969 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:41 crc kubenswrapper[4792]: E0121 17:58:41.801121 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:42.301099547 +0000 UTC m=+156.283062743 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.801291 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:41 crc kubenswrapper[4792]: E0121 17:58:41.801656 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:42.30164388 +0000 UTC m=+156.283607066 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:41 crc kubenswrapper[4792]: I0121 17:58:41.902602 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:41 crc kubenswrapper[4792]: E0121 17:58:41.903068 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:42.403049913 +0000 UTC m=+156.385013099 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.004383 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:42 crc kubenswrapper[4792]: E0121 17:58:42.004834 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:42.504799962 +0000 UTC m=+156.486763148 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.106199 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:42 crc kubenswrapper[4792]: E0121 17:58:42.106592 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:42.606575554 +0000 UTC m=+156.588538740 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.156007 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-vs6k9"]
Jan 21 17:58:42 crc kubenswrapper[4792]: W0121 17:58:42.164190 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5ba25763_8b03_46c7_bc29_e401dd42266c.slice/crio-4fb9cf1cb7b798675504e21592db525c99336ba77380493f9aade78996b66554 WatchSource:0}: Error finding container 4fb9cf1cb7b798675504e21592db525c99336ba77380493f9aade78996b66554: Status 404 returned error can't find the container with id 4fb9cf1cb7b798675504e21592db525c99336ba77380493f9aade78996b66554
Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.208319 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:42 crc kubenswrapper[4792]: E0121 17:58:42.208657 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:42.708643103 +0000 UTC m=+156.690606289 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.273922 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r49jw"] Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.278499 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g2vzr"] Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.325197 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:42 crc kubenswrapper[4792]: E0121 17:58:42.325917 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:42.825896779 +0000 UTC m=+156.807859965 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.337662 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8xdnr"] Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.374807 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-sgdgm" event={"ID":"37e5c807-b97c-4317-83a8-8266feac9df8","Type":"ContainerStarted","Data":"3e1dea57fc56c3427579d8431e5f825e00ae7800738a903c604e3efab909e8af"} Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.397363 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-w9bvf" event={"ID":"209b3283-9e92-472c-bc99-10d9d56788b2","Type":"ContainerStarted","Data":"d6f71afe338d057b904f855e366d97a11057b3b4b2c7afb2c6155cf710814089"} Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.403317 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-zhchl" event={"ID":"cb575284-6b4b-49ab-b314-ba85d494ef6c","Type":"ContainerStarted","Data":"ee52260172dd84dd98f501d4cd63a6fb00647f78f8155439ff16d9bedfb34a12"} Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.412579 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8xdnr" 
event={"ID":"5f2f6be3-04c5-4f5f-a545-b30d9e652982","Type":"ContainerStarted","Data":"2421021f0ccebf581bf29b0f1b9a8bb0c0acfe6d073b20e971e33a7dc5c2364d"} Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.422551 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-w9bvf" podStartSLOduration=136.422534923 podStartE2EDuration="2m16.422534923s" podCreationTimestamp="2026-01-21 17:56:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:42.421892446 +0000 UTC m=+156.403855642" watchObservedRunningTime="2026-01-21 17:58:42.422534923 +0000 UTC m=+156.404498109" Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.422688 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-sgdgm" podStartSLOduration=7.422667636 podStartE2EDuration="7.422667636s" podCreationTimestamp="2026-01-21 17:58:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:42.398378511 +0000 UTC m=+156.380341697" watchObservedRunningTime="2026-01-21 17:58:42.422667636 +0000 UTC m=+156.404630822" Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.426724 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:42 crc kubenswrapper[4792]: E0121 17:58:42.427118 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:42.927102806 +0000 UTC m=+156.909065992 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.452998 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-vs6k9" event={"ID":"5ba25763-8b03-46c7-bc29-e401dd42266c","Type":"ContainerStarted","Data":"4fb9cf1cb7b798675504e21592db525c99336ba77380493f9aade78996b66554"} Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.461147 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g2vzr" event={"ID":"8106ce05-fe62-4b3e-93fd-0acb505d4e4b","Type":"ContainerStarted","Data":"b1bf73c6e409c040c733b58563027afea9a930923b09f5228d2cf8e73010a338"} Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.462773 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lctf8" event={"ID":"940eecee-282d-4cf5-b296-0a9b63ea3d95","Type":"ContainerStarted","Data":"e1c82d3b3f071ff0656989f177c3c5c53901941bacc8318840a2cdb0d3b994ee"} Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.464491 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-zhchl" podStartSLOduration=135.464472496 podStartE2EDuration="2m15.464472496s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:42.461993104 +0000 UTC m=+156.443956290" watchObservedRunningTime="2026-01-21 17:58:42.464472496 +0000 UTC m=+156.446435692" Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.475717 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r49jw" event={"ID":"c5c2333c-edb3-4c41-9780-f3bc5c60ce18","Type":"ContainerStarted","Data":"d95ba76ac87023ea416a36fd903af97b66d06ef04c4d702152e654a9120dbed7"} Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.475918 4792 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-dz4cq container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.475953 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" podUID="3afd98a4-e47a-4e22-ab23-0cbf1bf56e46" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.477554 4792 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-42ml5 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: 
connect: connection refused" start-of-body= Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.477588 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" podUID="39822312-6707-4de1-8cc6-5ab1f513ebf7" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.526275 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.528169 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:42 crc kubenswrapper[4792]: E0121 17:58:42.529864 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:43.02982564 +0000 UTC m=+157.011788826 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.526326 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.559086 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt"] Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.576240 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-d869q"] Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.578628 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-qq7s6"] Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.593569 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-65vgm"] Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.624375 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-mrvfm"] Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.630444 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.631544 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-r2mgl"] Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.635655 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-kzpjg"] Jan 21 17:58:42 crc kubenswrapper[4792]: E0121 17:58:42.635997 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:43.135982631 +0000 UTC m=+157.117945817 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.653276 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-22hnh"] Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.663422 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7w8k4"] Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.676917 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h99r2"] Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.681101 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2zfjc"] Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.683104 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f659j"] Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.687407 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-q7mdj"] Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.690614 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-w784k"] Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.694232 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4nscl"] Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.697199 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483625-rljlr"] Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.698994 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-7v9hk"] Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.700478 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-console/downloads-7954f5f757-xpn4x"] Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.701908 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ft2zn"] Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.731622 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:42 crc kubenswrapper[4792]: E0121 17:58:42.731784 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:43.231762032 +0000 UTC m=+157.213725218 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.731908 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:42 crc kubenswrapper[4792]: E0121 17:58:42.732306 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:43.232276196 +0000 UTC m=+157.214239382 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:42 crc kubenswrapper[4792]: W0121 17:58:42.817233 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1166298c_e29b_4c0a_b153_d92acf85b0d2.slice/crio-d3ca9cfc0747b5a6a35c91282b3eb87fd7e16705be660addced132da474c6386 WatchSource:0}: Error finding container d3ca9cfc0747b5a6a35c91282b3eb87fd7e16705be660addced132da474c6386: Status 404 returned error can't find the container with id d3ca9cfc0747b5a6a35c91282b3eb87fd7e16705be660addced132da474c6386 Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.829995 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-ts9kq"] Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.835700 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-jt4rr"] Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.836380 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:42 crc kubenswrapper[4792]: E0121 17:58:42.836916 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:43.336890188 +0000 UTC m=+157.318853374 (durationBeforeRetry 500ms). 
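
Every MountDevice and TearDownAt failure above has the same root cause: kubelet resolves a CSI driver by name against the set of drivers that node plugins have registered over the plugin-registration socket, and at this point in the boot the hostpath-provisioner plugin pod (csi-hostpathplugin-ts9kq, itself only just being started above) has not yet announced itself. A minimal Go sketch of that lookup; all names here are hypothetical stand-ins, not kubelet's actual internals:

    package main

    import (
    	"fmt"
    	"sync"
    )

    // driverRegistry stands in for kubelet's registered-CSI-drivers map; the
    // real one is filled in by the plugin watcher when a node plugin registers
    // over the plugin-registration socket.
    type driverRegistry struct {
    	mu      sync.RWMutex
    	drivers map[string]string // driver name -> CSI endpoint (unix socket)
    }

    func (r *driverRegistry) endpoint(name string) (string, error) {
    	r.mu.RLock()
    	defer r.mu.RUnlock()
    	ep, ok := r.drivers[name]
    	if !ok {
    		// This is the condition the log keeps reporting.
    		return "", fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
    	}
    	return ep, nil
    }

    func main() {
    	reg := &driverRegistry{drivers: map[string]string{}}
    	if _, err := reg.endpoint("kubevirt.io.hostpath-provisioner"); err != nil {
    		fmt.Println("MountDevice would fail:", err)
    	}
    	// Once the driver pod registers, the same lookup starts succeeding.
    	reg.mu.Lock()
    	reg.drivers["kubevirt.io.hostpath-provisioner"] = "/var/lib/kubelet/plugins/csi-hostpath/csi.sock" // assumed path
    	reg.mu.Unlock()
    	ep, _ := reg.endpoint("kubevirt.io.hostpath-provisioner")
    	fmt.Println("driver registered at", ep)
    }

Once registration lands, the retry loop below stops producing these errors on its next pass.
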
Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.844898 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-6nxvx"]
Jan 21 17:58:42 crc kubenswrapper[4792]: W0121 17:58:42.845042 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc181f61f_a9d3_4d0e_84d2_aa2bd560017b.slice/crio-31fe822ab3e4c48a08ebfb7c83d572d1c3d486f416bbfcbb368ef9acbc2d53e0 WatchSource:0}: Error finding container 31fe822ab3e4c48a08ebfb7c83d572d1c3d486f416bbfcbb368ef9acbc2d53e0: Status 404 returned error can't find the container with id 31fe822ab3e4c48a08ebfb7c83d572d1c3d486f416bbfcbb368ef9acbc2d53e0
Jan 21 17:58:42 crc kubenswrapper[4792]: W0121 17:58:42.848590 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1f83bd3_b657_494d_86b4_0981418d9c8d.slice/crio-526c26b2391ac4fe43375f3877a878962ef890d7185422450092f7e7ebb77b79 WatchSource:0}: Error finding container 526c26b2391ac4fe43375f3877a878962ef890d7185422450092f7e7ebb77b79: Status 404 returned error can't find the container with id 526c26b2391ac4fe43375f3877a878962ef890d7185422450092f7e7ebb77b79
Jan 21 17:58:42 crc kubenswrapper[4792]: W0121 17:58:42.851113 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4386205_1780_4ef2_a790_edab679ee8a7.slice/crio-8d64e8f58dfcb0db0a96ebdc6aa6cdff02a1c828c0666255deec8cdf0299eee8 WatchSource:0}: Error finding container 8d64e8f58dfcb0db0a96ebdc6aa6cdff02a1c828c0666255deec8cdf0299eee8: Status 404 returned error can't find the container with id 8d64e8f58dfcb0db0a96ebdc6aa6cdff02a1c828c0666255deec8cdf0299eee8
Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.859976 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bdt7b"]
Jan 21 17:58:42 crc kubenswrapper[4792]: W0121 17:58:42.864401 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod87668f27_58db_4e11_b03d_9e9c5fd8f35c.slice/crio-15e7ee08fa390a3001bbcc652d071fe91569db7953b9e5f4f5e67f6c57112819 WatchSource:0}: Error finding container 15e7ee08fa390a3001bbcc652d071fe91569db7953b9e5f4f5e67f6c57112819: Status 404 returned error can't find the container with id 15e7ee08fa390a3001bbcc652d071fe91569db7953b9e5f4f5e67f6c57112819
Jan 21 17:58:42 crc kubenswrapper[4792]: W0121 17:58:42.879523 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod148627d0_5541_4e87_9b4c_cf22303d76b5.slice/crio-be500201a38f4a93f45c66a5108b5edf792c43a113adcd989a3013141b75c210 WatchSource:0}: Error finding container be500201a38f4a93f45c66a5108b5edf792c43a113adcd989a3013141b75c210: Status 404 returned error can't find the container with id be500201a38f4a93f45c66a5108b5edf792c43a113adcd989a3013141b75c210
Jan 21 17:58:42 crc kubenswrapper[4792]: W0121 17:58:42.880367 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc380d9c_2633_4904_8225_bd456fdb103b.slice/crio-706eac472f6975f631dd3f5c5fd4ff1a3a0c2b2200f1316a22ccf8e96d5b9f8e WatchSource:0}: Error finding container 706eac472f6975f631dd3f5c5fd4ff1a3a0c2b2200f1316a22ccf8e96d5b9f8e: Status 404 returned error can't find the container with id 706eac472f6975f631dd3f5c5fd4ff1a3a0c2b2200f1316a22ccf8e96d5b9f8e
Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.882934 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-srb5s"]
Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.893480 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nm5x9"]
Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.916814 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5jx5t"]
Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.934720 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-p8ffl"]
Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.938327 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:42 crc kubenswrapper[4792]: E0121 17:58:42.938944 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:43.438918205 +0000 UTC m=+157.420881391 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:42 crc kubenswrapper[4792]: I0121 17:58:42.954744 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-bxqhq"]
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.040054 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:43 crc kubenswrapper[4792]: E0121 17:58:43.040196 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:43.540169574 +0000 UTC m=+157.522132750 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.040298 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:43 crc kubenswrapper[4792]: E0121 17:58:43.040587 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:43.540579754 +0000 UTC m=+157.522542940 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.141260 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:43 crc kubenswrapper[4792]: E0121 17:58:43.141566 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:43.641510374 +0000 UTC m=+157.623473570 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
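
Each of these failures is parked in a per-volume pending-operations table: when an operation fails, kubelet records the earliest time it may run again ("No retries permitted until ..."), and every subsequent reconcile pass simply skips the operation until that deadline passes. That is why the same UnmountVolume/MountVolume error pair recurs about every 100ms of reconcile activity while the gate stays at a constant durationBeforeRetry of 500ms. A simplified stand-in for that gate, not kubelet's actual nestedpendingoperations package:

    package main

    import (
    	"fmt"
    	"time"
    )

    type pendingOp struct {
    	lastError      error
    	notBefore      time.Time     // "No retries permitted until ..."
    	durationBefore time.Duration // "(durationBeforeRetry ...)"
    }

    // fail records the error and pushes the next permissible attempt into the future.
    func (op *pendingOp) fail(err error, backoff time.Duration) {
    	op.lastError = err
    	op.durationBefore = backoff
    	op.notBefore = time.Now().Add(backoff)
    }

    // mayRetry is what each reconcile pass checks before re-running the operation.
    func (op *pendingOp) mayRetry(now time.Time) bool { return now.After(op.notBefore) }

    func main() {
    	op := &pendingOp{}
    	op.fail(fmt.Errorf("driver not yet registered"), 500*time.Millisecond)
    	fmt.Printf("no retries permitted until %s (durationBeforeRetry %s)\n",
    		op.notBefore.Format(time.RFC3339Nano), op.durationBefore)
    	time.Sleep(600 * time.Millisecond)
    	fmt.Println("may retry now:", op.mayRetry(time.Now()))
    }
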
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.141683 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:43 crc kubenswrapper[4792]: E0121 17:58:43.142084 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:43.642067468 +0000 UTC m=+157.624030654 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.243124 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:43 crc kubenswrapper[4792]: E0121 17:58:43.243602 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:43.743581282 +0000 UTC m=+157.725544468 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.345168 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:43 crc kubenswrapper[4792]: E0121 17:58:43.345489 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:43.845473657 +0000 UTC m=+157.827436843 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.446473 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:43 crc kubenswrapper[4792]: E0121 17:58:43.446676 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:43.946645873 +0000 UTC m=+157.928609059 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.450527 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:43 crc kubenswrapper[4792]: E0121 17:58:43.450919 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:43.950899769 +0000 UTC m=+157.932862955 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.458573 4792 csr.go:261] certificate signing request csr-p7gf5 is approved, waiting to be issued
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.467763 4792 csr.go:257] certificate signing request csr-p7gf5 is issued
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.494701 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-qq7s6" event={"ID":"f8faafed-92b2-4ebc-9d37-9b7fc1d32067","Type":"ContainerStarted","Data":"8098047ff1ddcca2289557bd000b820c343bc26027422c3d1ac6fdc628f27d87"}
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.496272 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2zfjc" event={"ID":"2dc72b61-a3c4-4a8a-b969-94f9a9036946","Type":"ContainerStarted","Data":"03fdfc7430ed91f06799a7640046ea0e03a7371eef2b29e7c1fe06714c538ffe"}
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.498946 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7w8k4" event={"ID":"cc380d9c-2633-4904-8225-bd456fdb103b","Type":"ContainerStarted","Data":"706eac472f6975f631dd3f5c5fd4ff1a3a0c2b2200f1316a22ccf8e96d5b9f8e"}
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.501123 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-w784k" event={"ID":"148627d0-5541-4e87-9b4c-cf22303d76b5","Type":"ContainerStarted","Data":"be500201a38f4a93f45c66a5108b5edf792c43a113adcd989a3013141b75c210"}
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.503109 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" event={"ID":"aa38141d-14db-436f-b9f7-9270a301ef1e","Type":"ContainerStarted","Data":"5b5d5d761d4f84372c8dd53200150c41a9a9e377916394cc17df9a185905840a"}
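
The two csr.go lines above show certificate issuance as a two-phase handshake: an approver first sets the Approved condition on the CSR ("approved, waiting to be issued"), and only afterwards does the signer populate status.certificate ("is issued"). A sketch of a client waiting for that second phase with the real client-go certificates/v1 API; the kubeconfig path and poll interval here are assumptions:

    package main

    import (
    	"context"
    	"fmt"
    	"time"

    	certv1 "k8s.io/api/certificates/v1"
    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    // waitForIssued blocks until the named CSR carries both the Approved
    // condition and a signed certificate in status.certificate.
    func waitForIssued(ctx context.Context, cs kubernetes.Interface, name string) ([]byte, error) {
    	for {
    		csr, err := cs.CertificatesV1().CertificateSigningRequests().Get(ctx, name, metav1.GetOptions{})
    		if err != nil {
    			return nil, err
    		}
    		approved := false
    		for _, cond := range csr.Status.Conditions {
    			if cond.Type == certv1.CertificateApproved {
    				approved = true
    			}
    		}
    		if approved && len(csr.Status.Certificate) > 0 {
    			return csr.Status.Certificate, nil // "is issued"
    		}
    		if approved {
    			fmt.Printf("certificate signing request %s is approved, waiting to be issued\n", name)
    		}
    		select {
    		case <-ctx.Done():
    			return nil, ctx.Err()
    		case <-time.After(2 * time.Second): // assumed poll interval
    		}
    	}
    }

    func main() {
    	cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/kubelet/kubeconfig") // assumed path
    	if err != nil {
    		panic(err)
    	}
    	cs := kubernetes.NewForConfigOrDie(cfg)
    	pem, err := waitForIssued(context.Background(), cs, "csr-p7gf5")
    	if err != nil {
    		panic(err)
    	}
    	fmt.Printf("issued certificate: %d bytes of PEM\n", len(pem))
    }
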
pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" event={"ID":"aa38141d-14db-436f-b9f7-9270a301ef1e","Type":"ContainerStarted","Data":"5b5d5d761d4f84372c8dd53200150c41a9a9e377916394cc17df9a185905840a"} Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.504467 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h99r2" event={"ID":"87668f27-58db-4e11-b03d-9e9c5fd8f35c","Type":"ContainerStarted","Data":"15e7ee08fa390a3001bbcc652d071fe91569db7953b9e5f4f5e67f6c57112819"} Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.506307 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d869q" event={"ID":"bdbd468f-304e-429c-9957-e60c6e756db4","Type":"ContainerStarted","Data":"c63c8651ec1c93e6eb58273479ac6eb5d967077f5266faaec845a42c077da6e8"} Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.509133 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-6nxvx" event={"ID":"a0ea4efe-8d03-4090-b537-344474f2cdac","Type":"ContainerStarted","Data":"f02170ba28493d73b1b000a999a450cd5952bb8a30c33ec245eb019bf87975dd"} Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.510834 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-p8ffl" event={"ID":"72615ab0-4d57-4849-89f9-3f660e97825b","Type":"ContainerStarted","Data":"f778d89c9f491c59fbb986f449ad96aeea0d01051acaf8b7284966bfc195da25"} Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.512045 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22hnh" event={"ID":"f1f83bd3-b657-494d-86b4-0981418d9c8d","Type":"ContainerStarted","Data":"526c26b2391ac4fe43375f3877a878962ef890d7185422450092f7e7ebb77b79"} Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.513968 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5jx5t" event={"ID":"68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5","Type":"ContainerStarted","Data":"a81f121ad992672c483502cbcd9fcd4768172216dd6fef8563469688378b399f"} Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.515425 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-srb5s" event={"ID":"5be3fc9c-cfde-41ee-8383-02800d3990cd","Type":"ContainerStarted","Data":"d5f10ca4860eb7c2bb96092b86d81ecf828c7a2ef5e35edd0001dd5243a5d186"} Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.517048 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" event={"ID":"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1","Type":"ContainerStarted","Data":"f97cd0d37a983c51ab543cfdbeae52146425749ad7709041f9d35de23dfebdee"} Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.518491 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-65vgm" event={"ID":"1166298c-e29b-4c0a-b153-d92acf85b0d2","Type":"ContainerStarted","Data":"d3ca9cfc0747b5a6a35c91282b3eb87fd7e16705be660addced132da474c6386"} Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.520209 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8xdnr" 
event={"ID":"5f2f6be3-04c5-4f5f-a545-b30d9e652982","Type":"ContainerStarted","Data":"82cc1a41d68afaef1aba8168c50047ed44e0fbb47ed269d3c08259f8abdf834f"} Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.521571 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-r2mgl" event={"ID":"4a02b713-1a2c-43d9-9ed2-de57e40a2364","Type":"ContainerStarted","Data":"91f729c9bdd3d2a336ef80c510e38695dafa7ab37c0ede81f7853858abf6e817"} Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.523150 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ft2zn" event={"ID":"c705c522-8d1a-4f0e-9d7a-671489591029","Type":"ContainerStarted","Data":"c081d4128f8ba8f724c389c137c17c52ed5401a935d2c8a7777d9e41ab91054a"} Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.528426 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.528474 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.531151 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7v9hk" event={"ID":"a4386205-1780-4ef2-a790-edab679ee8a7","Type":"ContainerStarted","Data":"8d64e8f58dfcb0db0a96ebdc6aa6cdff02a1c828c0666255deec8cdf0299eee8"} Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.534048 4792 generic.go:334] "Generic (PLEG): container finished" podID="940eecee-282d-4cf5-b296-0a9b63ea3d95" containerID="e1c82d3b3f071ff0656989f177c3c5c53901941bacc8318840a2cdb0d3b994ee" exitCode=0 Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.534459 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lctf8" event={"ID":"940eecee-282d-4cf5-b296-0a9b63ea3d95","Type":"ContainerDied","Data":"e1c82d3b3f071ff0656989f177c3c5c53901941bacc8318840a2cdb0d3b994ee"} Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.536887 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-cvxjv" event={"ID":"387582b8-e641-4519-a20f-794c23707b51","Type":"ContainerStarted","Data":"e1f40e867987221c2d15c795b61dbd8e8107dd65b26d59f315c6c325b11aba7e"} Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.540598 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-rljlr" event={"ID":"c181f61f-a9d3-4d0e-84d2-aa2bd560017b","Type":"ContainerStarted","Data":"31fe822ab3e4c48a08ebfb7c83d572d1c3d486f416bbfcbb368ef9acbc2d53e0"} Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.542857 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-xpn4x" event={"ID":"1001cdf3-8e91-4e17-a8f7-a92b91daf23e","Type":"ContainerStarted","Data":"1b8fb3fb2dbe28425caa26ad4dc03a7f97b168625312a78d014395861801c968"} Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.544193 4792 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-jt4rr" event={"ID":"0221af58-b758-43bd-8c44-5af77ab00967","Type":"ContainerStarted","Data":"d22179ded7ac656afe13ba018fd4b5bf44c30d79410eaf8036a0d483485a6b15"} Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.545760 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-kzpjg" event={"ID":"26aa0d14-edd4-44c6-84d8-86b8361adfbd","Type":"ContainerStarted","Data":"b27af0a0955768561694decdd6907d80a18ab3e3e356b70c51131e0beeddfdd7"} Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.547144 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g2vzr" event={"ID":"8106ce05-fe62-4b3e-93fd-0acb505d4e4b","Type":"ContainerStarted","Data":"50c2d17f261cb3195a713673752acf05479d5486ba63a2120ad8a41db83f2674"} Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.548829 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-mrvfm" event={"ID":"3d05043f-32a0-4b55-ac55-3f48b3c25bc5","Type":"ContainerStarted","Data":"97cec04f65cf93a1c7a49de63c0edbb097ded7e7cfe7c62307ae35438c2bc486"} Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.550270 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nm5x9" event={"ID":"4352f184-e8fe-408e-b28b-db2cd474622f","Type":"ContainerStarted","Data":"8e1b119182bae02fc9e1c858b9fcbcfd656c9dfc099f4fc41f851693818f9511"} Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.551211 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:43 crc kubenswrapper[4792]: E0121 17:58:43.551681 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:44.051659315 +0000 UTC m=+158.033622501 (durationBeforeRetry 500ms). 
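
The burst of "SyncLoop (PLEG)" lines is the pod lifecycle event generator draining a relist: it periodically snapshots container states from the runtime, diffs them against the previous snapshot, and hands ContainerStarted/ContainerDied events to the sync loop (the "Generic (PLEG): container finished" line above is the Died half for the config-operator's container that exited with code 0). A toy version of that diff, with made-up types rather than kubelet's pleg package:

    package main

    import "fmt"

    type state string

    const (
    	running state = "running"
    	exited  state = "exited"
    )

    type plegEvent struct {
    	PodID, Type, Data string
    }

    // relist diffs the previous and current container states for one pod and
    // emits one event per observed transition, as PLEG does after each relist.
    func relist(podID string, prev, curr map[string]state) []plegEvent {
    	var events []plegEvent
    	for id, s := range curr {
    		switch {
    		case s == running && prev[id] != running:
    			events = append(events, plegEvent{podID, "ContainerStarted", id})
    		case s == exited && prev[id] == running:
    			events = append(events, plegEvent{podID, "ContainerDied", id})
    		}
    	}
    	return events
    }

    func main() {
    	prev := map[string]state{}
    	curr := map[string]state{"d3ca9cfc0747b5...": running} // container id truncated for the example
    	for _, e := range relist("1166298c-e29b-4c0a-b153-d92acf85b0d2", prev, curr) {
    		fmt.Printf("SyncLoop (PLEG): event for pod %s event={Type:%q Data:%q}\n", e.PodID, e.Type, e.Data)
    	}
    }

The earlier manager.go "Failed to process watch event ... Status 404" warnings are the benign flip side of the same startup race: the cgroup watcher sees a freshly created crio-* cgroup before the corresponding container is queryable, and the lookup transiently 404s.
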
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.559188 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-vs6k9" event={"ID":"5ba25763-8b03-46c7-bc29-e401dd42266c","Type":"ContainerStarted","Data":"97ea117bbed18794191b2ad68497d364d0af736bc9635a1fcf32c04fc8048676"}
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.561029 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4nscl" event={"ID":"d14cc544-196a-41ec-aa6e-ffca799481e7","Type":"ContainerStarted","Data":"fdec36052dd06980e85e11320e454dc3f3313c202c0cfdf2dc1f8a664b69ba40"}
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.563461 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" event={"ID":"a15d3491-6301-49f2-a196-df5db956aa82","Type":"ContainerStarted","Data":"3f6770720a48e2918b1cc3bbf02c0f2cb9eadce225da564cf677e9e6ca443588"}
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.564553 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" event={"ID":"e668a4f5-eb70-407c-91de-67acaaa1e03a","Type":"ContainerStarted","Data":"0718802e1fa972f757c4139072fe26a8bc07a776e57a88122a3df7827cb70e7a"}
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.565922 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" event={"ID":"7bf30f38-58cd-46e3-b3af-4f2c47124d2a","Type":"ContainerStarted","Data":"6209a33bbdce832e424e0c57e80e31960fb516cb9fe1638b87d6ea59704ab1bb"}
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.573517 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-cvxjv" podStartSLOduration=136.573491898 podStartE2EDuration="2m16.573491898s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:43.572291288 +0000 UTC m=+157.554254494" watchObservedRunningTime="2026-01-21 17:58:43.573491898 +0000 UTC m=+157.555455084"
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.654525 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:43 crc kubenswrapper[4792]: E0121 17:58:43.654829 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:44.15481086 +0000 UTC m=+158.136774046 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.685478 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.756657 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:43 crc kubenswrapper[4792]: E0121 17:58:43.756995 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:44.25692992 +0000 UTC m=+158.238893136 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.757305 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:43 crc kubenswrapper[4792]: E0121 17:58:43.757786 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:44.25775613 +0000 UTC m=+158.239719336 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
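
The pod_startup_latency_tracker record above is internally consistent: podStartSLOduration is the observed running time minus podCreationTimestamp, less time spent pulling images, and since both pull timestamps are the zero time here, the SLO and E2E durations coincide. A quick check of the arithmetic against the logged watchObservedRunningTime:

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
    	created, _ := time.Parse(layout, "2026-01-21 17:56:27 +0000 UTC")
    	running, _ := time.Parse(layout, "2026-01-21 17:58:43.573491898 +0000 UTC")
    	// Prints 2m16.573491898s, i.e. the logged 136.573491898 seconds.
    	fmt.Println(running.Sub(created))
    }
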
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.858749 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:43 crc kubenswrapper[4792]: E0121 17:58:43.859118 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:44.359084381 +0000 UTC m=+158.341047567 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.859173 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:43 crc kubenswrapper[4792]: E0121 17:58:43.859579 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:44.359569203 +0000 UTC m=+158.341532399 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.960831 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:43 crc kubenswrapper[4792]: E0121 17:58:43.961043 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:44.461010685 +0000 UTC m=+158.442973871 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:43 crc kubenswrapper[4792]: I0121 17:58:43.961501 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:43 crc kubenswrapper[4792]: E0121 17:58:43.961815 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:44.461802706 +0000 UTC m=+158.443765892 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:44 crc kubenswrapper[4792]: I0121 17:58:44.063099 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:44 crc kubenswrapper[4792]: E0121 17:58:44.063489 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:44.563470834 +0000 UTC m=+158.545434020 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:44 crc kubenswrapper[4792]: I0121 17:58:44.165760 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:44 crc kubenswrapper[4792]: E0121 17:58:44.166382 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:44.666353833 +0000 UTC m=+158.648317059 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:44 crc kubenswrapper[4792]: I0121 17:58:44.268036 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:44 crc kubenswrapper[4792]: E0121 17:58:44.268300 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:44.768262298 +0000 UTC m=+158.750225494 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:44 crc kubenswrapper[4792]: I0121 17:58:44.268584 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:44 crc kubenswrapper[4792]: E0121 17:58:44.269064 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:44.769044816 +0000 UTC m=+158.751007992 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:44 crc kubenswrapper[4792]: I0121 17:58:44.369662 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:44 crc kubenswrapper[4792]: E0121 17:58:44.369889 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:44.869840923 +0000 UTC m=+158.851804109 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:44 crc kubenswrapper[4792]: I0121 17:58:44.370693 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:44 crc kubenswrapper[4792]: E0121 17:58:44.371140 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:44.871131516 +0000 UTC m=+158.853094702 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:44 crc kubenswrapper[4792]: I0121 17:58:44.469797 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-01-21 17:53:43 +0000 UTC, rotation deadline is 2026-10-22 19:59:00.441994504 +0000 UTC
Jan 21 17:58:44 crc kubenswrapper[4792]: I0121 17:58:44.469877 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6578h0m15.972123962s for next certificate rotation
Jan 21 17:58:44 crc kubenswrapper[4792]: I0121 17:58:44.472164 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:44 crc kubenswrapper[4792]: E0121 17:58:44.472387 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:44.972351554 +0000 UTC m=+158.954314740 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:44 crc kubenswrapper[4792]: I0121 17:58:44.472551 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:44 crc kubenswrapper[4792]: E0121 17:58:44.473020 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:44.973003709 +0000 UTC m=+158.954966895 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
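
The certificate_manager lines are also self-consistent. In client-go's certificate manager the rotation deadline is a jittered point roughly 70-90% of the way through the certificate's validity, and the logged wait is simply deadline minus now: 2026-10-22 19:59:00.441994504 minus 2026-01-21 17:58:44.469870542 is exactly the logged 6578h0m15.972123962s. A sketch of both calculations; the one-year notBefore is an assumption inferred from the expiry date:

    package main

    import (
    	"fmt"
    	"math/rand"
    	"time"
    )

    // rotationDeadline mimics the jittered deadline: a uniform point between
    // 70% and 90% of the certificate's validity period after notBefore.
    func rotationDeadline(notBefore, notAfter time.Time, r *rand.Rand) time.Time {
    	total := notAfter.Sub(notBefore)
    	return notBefore.Add(time.Duration((0.7 + 0.2*r.Float64()) * float64(total)))
    }

    func main() {
    	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
    	notBefore, _ := time.Parse(layout, "2026-01-21 17:53:43 +0000 UTC") // assumed one-year cert
    	notAfter, _ := time.Parse(layout, "2027-01-21 17:53:43 +0000 UTC")  // expiry from the log
    	deadline, _ := time.Parse(layout, "2026-10-22 19:59:00.441994504 +0000 UTC")
    	now, _ := time.Parse(layout, "2026-01-21 17:58:44.469870542 +0000 UTC") // log time of the message

    	frac := float64(deadline.Sub(notBefore)) / float64(notAfter.Sub(notBefore))
    	fmt.Printf("deadline sits at %.1f%% of the validity period\n", 100*frac) // ~75.1%, inside [70%, 90%)
    	fmt.Printf("Waiting %s for next certificate rotation\n", deadline.Sub(now))
    	_ = rotationDeadline(notBefore, notAfter, rand.New(rand.NewSource(1))) // drawing a fresh deadline
    }
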
Jan 21 17:58:44 crc kubenswrapper[4792]: I0121 17:58:44.529273 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body=
Jan 21 17:58:44 crc kubenswrapper[4792]: I0121 17:58:44.529616 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused"
Jan 21 17:58:44 crc kubenswrapper[4792]: I0121 17:58:44.573682 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:44 crc kubenswrapper[4792]: E0121 17:58:44.573886 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:45.073831987 +0000 UTC m=+159.055795173 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:44 crc kubenswrapper[4792]: I0121 17:58:44.574060 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:44 crc kubenswrapper[4792]: E0121 17:58:44.574504 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:45.074494014 +0000 UTC m=+159.056457210 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:44 crc kubenswrapper[4792]: I0121 17:58:44.675111 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:44 crc kubenswrapper[4792]: E0121 17:58:44.675395 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:45.175350262 +0000 UTC m=+159.157313448 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:44 crc kubenswrapper[4792]: I0121 17:58:44.675635 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:44 crc kubenswrapper[4792]: E0121 17:58:44.675964 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:45.175946857 +0000 UTC m=+159.157910043 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
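
The router's failing startup probe (here and at 17:58:43.528 above) is nothing more than an HTTP GET that cannot connect yet: kubelet fetches localhost:1936/healthz/ready, "connection refused" means nothing is listening on that port yet, and the probe is retried on the next period until it succeeds or the startup threshold is exhausted. A reduced version of that check; the URL comes from the log, the timeout is an assumption:

    package main

    import (
    	"fmt"
    	"net/http"
    	"time"
    )

    // probeOnce performs a single HTTP probe: any dial error or non-2xx/3xx
    // status counts as a failure, mirroring kubelet's probeResult="failure".
    func probeOnce(url string) (string, error) {
    	client := &http.Client{Timeout: time.Second}
    	resp, err := client.Get(url)
    	if err != nil {
    		return "failure", err // e.g. dial tcp [::1]:1936: connect: connection refused
    	}
    	defer resp.Body.Close()
    	if resp.StatusCode >= 200 && resp.StatusCode < 400 {
    		return "success", nil
    	}
    	return "failure", fmt.Errorf("unexpected status %d", resp.StatusCode)
    }

    func main() {
    	result, err := probeOnce("http://localhost:1936/healthz/ready")
    	fmt.Printf("probeType=Startup probeResult=%q err=%v\n", result, err)
    }
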
Jan 21 17:58:45 crc kubenswrapper[4792]: I0121 17:58:45.526790 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body=
Jan 21 17:58:45 crc kubenswrapper[4792]: I0121 17:58:45.527014 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused"
Jan 21 17:58:45 crc kubenswrapper[4792]: I0121 17:58:45.582568 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f659j" event={"ID":"526cf46b-9c0e-4ea2-850f-e9fba77a9449","Type":"ContainerStarted","Data":"6c39cb881dc9a2b7975c0777899308a0fd27b700938c0beb8a1ba127d9ff2736"}
Jan 21 17:58:45 crc kubenswrapper[4792]: I0121 17:58:45.592492 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:45 crc kubenswrapper[4792]: E0121 17:58:45.592926 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:46.092906153 +0000 UTC m=+160.074869339 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:45 crc kubenswrapper[4792]: I0121 17:58:45.696895 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:45 crc kubenswrapper[4792]: E0121 17:58:45.697272 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:46.197259778 +0000 UTC m=+160.179222964 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
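The router startup probe failing above is a plain HTTP GET against localhost:1936/healthz/ready. A rough stand-alone equivalent (a sketch; kubelet's prober additionally sets probe headers, captures the body for the log line, and counts only 2xx/3xx as success):

package main

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get("http://localhost:1936/healthz/ready")
	if err != nil {
		// While nothing listens yet: dial tcp [::1]:1936: connect: connection refused
		fmt.Println("probe failure:", err)
		return
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(io.LimitReader(resp.Body, 10*1024))
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		// Once the router listens but isn't synced: HTTP 500 plus check output
		fmt.Printf("probe failure: HTTP %d\n%s", resp.StatusCode, body)
		return
	}
	fmt.Println("probe success")
}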
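The cadence of these records, an attempt roughly every 100 ms, each refused with "No retries permitted until" a timestamp 500 ms after the last failure, is the kubelet's per-operation retry gate. A toy reproduction of that gate (illustrative names, not the nestedpendingoperations API; kubelet's real backoff can also grow beyond the initial 500 ms):

package main

import (
	"fmt"
	"time"
)

// pendingOp refuses retries until a deadline set on the previous failure.
type pendingOp struct {
	notBefore time.Time
}

func (o *pendingOp) try(run func() error) error {
	now := time.Now()
	if now.Before(o.notBefore) {
		return fmt.Errorf("no retries permitted until %s", o.notBefore.Format(time.RFC3339Nano))
	}
	if err := run(); err != nil {
		o.notBefore = now.Add(500 * time.Millisecond) // durationBeforeRetry 500ms
		return err
	}
	return nil
}

func main() {
	op := &pendingOp{}
	mount := func() error { return fmt.Errorf("driver not registered") }
	for i := 0; i < 5; i++ {
		if err := op.try(mount); err != nil {
			fmt.Println(err)
		}
		time.Sleep(100 * time.Millisecond) // the reconciler's sync period in this log
	}
}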
Jan 21 17:58:46 crc kubenswrapper[4792]: I0121 17:58:46.536298 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:58:46 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld
Jan 21 17:58:46 crc kubenswrapper[4792]: [+]process-running ok
Jan 21 17:58:46 crc kubenswrapper[4792]: healthz check failed
Jan 21 17:58:46 crc kubenswrapper[4792]: I0121 17:58:46.536366 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:58:46 crc kubenswrapper[4792]: I0121 17:58:46.591152 4792 generic.go:334] "Generic (PLEG): container finished" podID="7bf30f38-58cd-46e3-b3af-4f2c47124d2a" containerID="9ada73eff80564cc45314baec601f69af4580f60e4f346192c0a25de20129fa5" exitCode=0
Jan 21 17:58:46 crc kubenswrapper[4792]: I0121 17:58:46.591255 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" event={"ID":"7bf30f38-58cd-46e3-b3af-4f2c47124d2a","Type":"ContainerDied","Data":"9ada73eff80564cc45314baec601f69af4580f60e4f346192c0a25de20129fa5"}
Jan 21 17:58:46 crc kubenswrapper[4792]: I0121 17:58:46.594741 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r49jw" event={"ID":"c5c2333c-edb3-4c41-9780-f3bc5c60ce18","Type":"ContainerStarted","Data":"db226635d604fec4833e68c2472ed4d1ea5c286b0140f3de6a398a34e20e51f7"}
Jan 21 17:58:46 crc kubenswrapper[4792]: I0121 17:58:46.596952 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" event={"ID":"e668a4f5-eb70-407c-91de-67acaaa1e03a","Type":"ContainerStarted","Data":"c1895df1af44651130616177b2f3345cafee2ec5b944d5c5cafec831a68415d2"}
Jan 21 17:58:46 crc kubenswrapper[4792]: I0121 17:58:46.597301 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8xdnr"
Jan 21 17:58:46 crc kubenswrapper[4792]: I0121 17:58:46.611425 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8xdnr"
Jan 21 17:58:46 crc kubenswrapper[4792]: I0121 17:58:46.617937 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:46 crc kubenswrapper[4792]: E0121 17:58:46.618453 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:47.118423638 +0000 UTC m=+161.100386824 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
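The multi-line probe output above, [+]/[-] lines followed by an overall verdict, is the usual aggregated healthz format: one line per sub-check and HTTP 500 when any check fails. A toy handler that emits the same shape (check names copied from the log; the handler itself is an assumption for illustration, not the router's code):

package main

import (
	"fmt"
	"log"
	"net/http"
)

type check struct {
	name string
	ok   bool
}

// healthz writes one [+]/[-] line per sub-check and returns 500 on any failure.
func healthz(w http.ResponseWriter, _ *http.Request) {
	checks := []check{
		{"backend-http", false},
		{"has-synced", false},
		{"process-running", true},
	}
	failed := false
	for _, c := range checks {
		if !c.ok {
			failed = true
		}
	}
	if failed {
		w.WriteHeader(http.StatusInternalServerError) // status must precede the body
	}
	for _, c := range checks {
		if c.ok {
			fmt.Fprintf(w, "[+]%s ok\n", c.name)
		} else {
			fmt.Fprintf(w, "[-]%s failed: reason withheld\n", c.name)
		}
	}
	if failed {
		fmt.Fprintln(w, "healthz check failed")
	}
}

func main() {
	http.HandleFunc("/healthz/ready", healthz)
	log.Fatal(http.ListenAndServe(":1936", nil))
}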
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:47.118423638 +0000 UTC m=+161.100386824 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:46 crc kubenswrapper[4792]: I0121 17:58:46.720196 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:46 crc kubenswrapper[4792]: E0121 17:58:46.722698 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:47.222681321 +0000 UTC m=+161.204644577 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:46 crc kubenswrapper[4792]: I0121 17:58:46.821273 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:46 crc kubenswrapper[4792]: E0121 17:58:46.821723 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:47.321693804 +0000 UTC m=+161.303656990 (durationBeforeRetry 500ms). 
Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.009453 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-g2vzr" podStartSLOduration=140.009417873 podStartE2EDuration="2m20.009417873s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:47.005473695 +0000 UTC m=+160.987436881" watchObservedRunningTime="2026-01-21 17:58:47.009417873 +0000 UTC m=+160.991381059"
Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.023731 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8xdnr" podStartSLOduration=140.023707518 podStartE2EDuration="2m20.023707518s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:47.021472262 +0000 UTC m=+161.003435448" watchObservedRunningTime="2026-01-21 17:58:47.023707518 +0000 UTC m=+161.005670704"
Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.024317 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:47 crc kubenswrapper[4792]: E0121 17:58:47.024516 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:47.524495207 +0000 UTC m=+161.506458393 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
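The two startup-latency records above fit a simple relationship: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration additionally excludes image-pull time, which is zero here given the 0001-01-01 pull timestamps, so the two values coincide. Checking the arithmetic for the first record:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout without a fraction; Go's time.Parse still accepts a fractional
	// second in the input immediately after the seconds field.
	const layout = "2006-01-02 15:04:05 -0700 MST"
	created, err := time.Parse(layout, "2026-01-21 17:56:27 +0000 UTC")
	if err != nil {
		panic(err)
	}
	running, err := time.Parse(layout, "2026-01-21 17:58:47.009417873 +0000 UTC")
	if err != nil {
		panic(err)
	}
	// Prints 2m20.009417873s, matching podStartE2EDuration above.
	fmt.Println(running.Sub(created))
}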
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:47.524495207 +0000 UTC m=+161.506458393 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.024697 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:47 crc kubenswrapper[4792]: E0121 17:58:47.025292 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:47.525268297 +0000 UTC m=+161.507231483 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.125623 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:47 crc kubenswrapper[4792]: E0121 17:58:47.125904 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:47.625842839 +0000 UTC m=+161.607806035 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.126166 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:47 crc kubenswrapper[4792]: E0121 17:58:47.126673 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:47.626656138 +0000 UTC m=+161.608619324 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.227157 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:47 crc kubenswrapper[4792]: E0121 17:58:47.227380 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:47.727351793 +0000 UTC m=+161.709314979 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.227594 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:47 crc kubenswrapper[4792]: E0121 17:58:47.228010 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:47.727998859 +0000 UTC m=+161.709962245 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.329181 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:47 crc kubenswrapper[4792]: E0121 17:58:47.329490 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:47.829440722 +0000 UTC m=+161.811403908 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.329805 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:47 crc kubenswrapper[4792]: E0121 17:58:47.330144 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:47.830130179 +0000 UTC m=+161.812093365 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.430966 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:47 crc kubenswrapper[4792]: E0121 17:58:47.431538 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:47.93150926 +0000 UTC m=+161.913472456 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.510526 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.531007 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 21 17:58:47 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld Jan 21 17:58:47 crc kubenswrapper[4792]: [+]process-running ok Jan 21 17:58:47 crc kubenswrapper[4792]: healthz check failed Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.531062 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.538373 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:47 crc kubenswrapper[4792]: E0121 17:58:47.538781 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:48.038765038 +0000 UTC m=+162.020728224 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.615014 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ft2zn" event={"ID":"c705c522-8d1a-4f0e-9d7a-671489591029","Type":"ContainerStarted","Data":"a1cb8e6d6a48e553b6bf8ba45a2a87ddf8343c49f61b420e21bb7445a5854893"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.626647 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7v9hk" event={"ID":"a4386205-1780-4ef2-a790-edab679ee8a7","Type":"ContainerStarted","Data":"5b504cbf2efc26148140743fdde42333a5e7a6c0e07305232332b402ed17cfa0"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.639987 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:47 crc kubenswrapper[4792]: E0121 17:58:47.640184 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:48.14016075 +0000 UTC m=+162.122123936 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.640283 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:47 crc kubenswrapper[4792]: E0121 17:58:47.641043 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:48.141026462 +0000 UTC m=+162.122989648 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.652539 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-srb5s" event={"ID":"5be3fc9c-cfde-41ee-8383-02800d3990cd","Type":"ContainerStarted","Data":"8dcc068e9e90f36a1c02866b70746afeb26413630fac2f30ff757b7ef9ddaaa7"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.654825 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-r2mgl" event={"ID":"4a02b713-1a2c-43d9-9ed2-de57e40a2364","Type":"ContainerStarted","Data":"0222ee1077b295cdd6194355ea2ea13a9a34bd19225c372c0d1afe1feb57fc89"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.657684 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" event={"ID":"a15d3491-6301-49f2-a196-df5db956aa82","Type":"ContainerStarted","Data":"89176767e2d7ab24fa9279370842d98d235f15b1682693406c811d7fa8abb730"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.659294 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d869q" event={"ID":"bdbd468f-304e-429c-9957-e60c6e756db4","Type":"ContainerStarted","Data":"53346e5d896487d2e9c9a71ad9097578ec9de5539c2468d9200e23fad7371edb"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.661036 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-w784k" event={"ID":"148627d0-5541-4e87-9b4c-cf22303d76b5","Type":"ContainerStarted","Data":"7984df616211cf38c9a0eb17298bfecd6c1941cdd2b2e197ba37a70e3dcb2c98"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.665190 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7w8k4" event={"ID":"cc380d9c-2633-4904-8225-bd456fdb103b","Type":"ContainerStarted","Data":"122b1bd4b67b9812a70101adbffd38b5e2b609320950dd2eeadaaf1b4f509c46"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.674667 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" event={"ID":"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1","Type":"ContainerStarted","Data":"7821e92844ddc4e01c36badad320617a5d002140ae36944a246428d74eb28fc5"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.675983 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-jt4rr" event={"ID":"0221af58-b758-43bd-8c44-5af77ab00967","Type":"ContainerStarted","Data":"48b4ccf6ed166d398bfce5926b5edc57f93e58e43b51dec02dc3d1292865849a"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.687203 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7w8k4" podStartSLOduration=140.68718666 podStartE2EDuration="2m20.68718666s" podCreationTimestamp="2026-01-21 
17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:47.685128178 +0000 UTC m=+161.667091374" watchObservedRunningTime="2026-01-21 17:58:47.68718666 +0000 UTC m=+161.669149846" Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.690500 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4nscl" event={"ID":"d14cc544-196a-41ec-aa6e-ffca799481e7","Type":"ContainerStarted","Data":"461547e5bba0ee43a552ccb5f15d81ec1e0caa2c23dff58fe4feda19f5164f75"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.712087 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2zfjc" event={"ID":"2dc72b61-a3c4-4a8a-b969-94f9a9036946","Type":"ContainerStarted","Data":"f1d9e5a489003fde8975320f6498a4b0d67ae6e10a1d456ed69663dd4a4c3c76"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.735185 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-kzpjg" event={"ID":"26aa0d14-edd4-44c6-84d8-86b8361adfbd","Type":"ContainerStarted","Data":"bf80b9a4d935ae8954d0ab8e73621e37111d13d123da76df8ee0a038ce5edfa6"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.737865 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-qq7s6" event={"ID":"f8faafed-92b2-4ebc-9d37-9b7fc1d32067","Type":"ContainerStarted","Data":"16c897951559c02308699368c31224594658da1258f05a9870692a523ea21f45"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.741254 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:47 crc kubenswrapper[4792]: E0121 17:58:47.741562 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:48.241546622 +0000 UTC m=+162.223509808 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.748251 4792 generic.go:334] "Generic (PLEG): container finished" podID="e668a4f5-eb70-407c-91de-67acaaa1e03a" containerID="c1895df1af44651130616177b2f3345cafee2ec5b944d5c5cafec831a68415d2" exitCode=0 Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.749005 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" event={"ID":"e668a4f5-eb70-407c-91de-67acaaa1e03a","Type":"ContainerDied","Data":"c1895df1af44651130616177b2f3345cafee2ec5b944d5c5cafec831a68415d2"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.764699 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f659j" event={"ID":"526cf46b-9c0e-4ea2-850f-e9fba77a9449","Type":"ContainerStarted","Data":"5a893df0fe540a43fd341405cd67b9730f21f04b495ab075bbc9a19ad074c9c3"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.784411 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-xpn4x" event={"ID":"1001cdf3-8e91-4e17-a8f7-a92b91daf23e","Type":"ContainerStarted","Data":"ad97330e98c6fbf4db1ecb521975a1e3fb03b9a40ee35bb1dc3bcb7c5d99835a"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.787250 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-mrvfm" event={"ID":"3d05043f-32a0-4b55-ac55-3f48b3c25bc5","Type":"ContainerStarted","Data":"f13dfa9aff2b4ce6037b6bc8b90247435afc2a2272f5ca2c6b38047e5e0c1fdb"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.821343 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-p8ffl" event={"ID":"72615ab0-4d57-4849-89f9-3f660e97825b","Type":"ContainerStarted","Data":"aaae8199faa5e909bfea1dd2a7215c29730742556b93cf19540608770675b2ee"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.838492 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22hnh" event={"ID":"f1f83bd3-b657-494d-86b4-0981418d9c8d","Type":"ContainerStarted","Data":"29f8a8712bcc22ba18e02d75a8afdb537ae9596e19d2a10d96accf243826c91a"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.844370 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:47 crc kubenswrapper[4792]: E0121 17:58:47.846790 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:48.346762289 +0000 UTC m=+162.328725475 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.857315 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-65vgm" event={"ID":"1166298c-e29b-4c0a-b153-d92acf85b0d2","Type":"ContainerStarted","Data":"a048099720e087d5d3684c6bb66895ee2d6182ded9112e8aca3d93ea4538c3c1"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.871766 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5jx5t" event={"ID":"68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5","Type":"ContainerStarted","Data":"f1aa0bd2bfee36065d3eba9ca9eb5cc4bb3f0ccad6ee9c5b10e7f6f26d18771c"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.884333 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nm5x9" event={"ID":"4352f184-e8fe-408e-b28b-db2cd474622f","Type":"ContainerStarted","Data":"3f3c087ca284e9e81acdd24952d7a6f461d0b35699e362602a4c6ea31a3497df"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.898115 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-rljlr" event={"ID":"c181f61f-a9d3-4d0e-84d2-aa2bd560017b","Type":"ContainerStarted","Data":"7f565a65bf855b0ebfd5423f864d2872a7d30231d2c8736d4c1bfc616cb3d67b"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.898195 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-qq7s6" podStartSLOduration=140.898165586 podStartE2EDuration="2m20.898165586s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:47.814118216 +0000 UTC m=+161.796081422" watchObservedRunningTime="2026-01-21 17:58:47.898165586 +0000 UTC m=+161.880128772" Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.900964 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h99r2" event={"ID":"87668f27-58db-4e11-b03d-9e9c5fd8f35c","Type":"ContainerStarted","Data":"dcbf60d9b9084e1ecf76e7dd75b3a82d1104c3268e28e0c447b4f37eb225dcd8"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.907090 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-6nxvx" event={"ID":"a0ea4efe-8d03-4090-b537-344474f2cdac","Type":"ContainerStarted","Data":"a54ca7855ea1793452eeaa8a8499bb8d8b3e59ec07293fdd1270c1b5feeedf42"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.915924 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lctf8" event={"ID":"940eecee-282d-4cf5-b296-0a9b63ea3d95","Type":"ContainerStarted","Data":"f1f0e4bf80d6e632a26873469a7680d2a5d294ed8e0e2f946208494ee8862c14"} Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.972516 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:47 crc kubenswrapper[4792]: E0121 17:58:47.973545 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:48.473530071 +0000 UTC m=+162.455493257 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:47 crc kubenswrapper[4792]: I0121 17:58:47.978199 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-65vgm" podStartSLOduration=140.978185427 podStartE2EDuration="2m20.978185427s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:47.969535061 +0000 UTC m=+161.951498247" watchObservedRunningTime="2026-01-21 17:58:47.978185427 +0000 UTC m=+161.960148613" Jan 21 17:58:48 crc kubenswrapper[4792]: I0121 17:58:48.074927 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:48 crc kubenswrapper[4792]: E0121 17:58:48.079551 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:48.579537168 +0000 UTC m=+162.561500354 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:48 crc kubenswrapper[4792]: I0121 17:58:48.157412 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" Jan 21 17:58:48 crc kubenswrapper[4792]: I0121 17:58:48.176457 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:48 crc kubenswrapper[4792]: E0121 17:58:48.176805 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:48.676790936 +0000 UTC m=+162.658754112 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:48 crc kubenswrapper[4792]: I0121 17:58:48.235086 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-vs6k9" podStartSLOduration=141.235061966 podStartE2EDuration="2m21.235061966s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:48.063034717 +0000 UTC m=+162.044997913" watchObservedRunningTime="2026-01-21 17:58:48.235061966 +0000 UTC m=+162.217025172" Jan 21 17:58:48 crc kubenswrapper[4792]: I0121 17:58:48.280003 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:48 crc kubenswrapper[4792]: E0121 17:58:48.281409 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:48.781395498 +0000 UTC m=+162.763358684 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:48 crc kubenswrapper[4792]: I0121 17:58:48.352601 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-r49jw" podStartSLOduration=141.352581439 podStartE2EDuration="2m21.352581439s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:48.352568598 +0000 UTC m=+162.334531794" watchObservedRunningTime="2026-01-21 17:58:48.352581439 +0000 UTC m=+162.334544625" Jan 21 17:58:48 crc kubenswrapper[4792]: I0121 17:58:48.386885 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:48 crc kubenswrapper[4792]: E0121 17:58:48.387206 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:48.88718791 +0000 UTC m=+162.869151096 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:48 crc kubenswrapper[4792]: I0121 17:58:48.411832 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-rljlr" podStartSLOduration=141.411797921 podStartE2EDuration="2m21.411797921s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:48.407264469 +0000 UTC m=+162.389227655" watchObservedRunningTime="2026-01-21 17:58:48.411797921 +0000 UTC m=+162.393761117" Jan 21 17:58:48 crc kubenswrapper[4792]: I0121 17:58:48.474833 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:48 crc kubenswrapper[4792]: I0121 17:58:48.474974 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-65vgm" Jan 21 17:58:48 crc kubenswrapper[4792]: I0121 17:58:48.478043 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-h99r2" podStartSLOduration=141.478030379 podStartE2EDuration="2m21.478030379s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:48.476631154 +0000 UTC m=+162.458594350" watchObservedRunningTime="2026-01-21 17:58:48.478030379 +0000 UTC m=+162.459993555" Jan 21 17:58:48 crc kubenswrapper[4792]: I0121 17:58:48.493759 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:48 crc kubenswrapper[4792]: E0121 17:58:48.494612 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:48.99459139 +0000 UTC m=+162.976554576 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:48 crc kubenswrapper[4792]: I0121 17:58:48.494943 4792 patch_prober.go:28] interesting pod/console-f9d7485db-65vgm container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.32:8443/health\": dial tcp 10.217.0.32:8443: connect: connection refused" start-of-body= Jan 21 17:58:48 crc kubenswrapper[4792]: I0121 17:58:48.495005 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-65vgm" podUID="1166298c-e29b-4c0a-b153-d92acf85b0d2" containerName="console" probeResult="failure" output="Get \"https://10.217.0.32:8443/health\": dial tcp 10.217.0.32:8443: connect: connection refused" Jan 21 17:58:48 crc kubenswrapper[4792]: I0121 17:58:48.528615 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-p5dgb" Jan 21 17:58:48 crc kubenswrapper[4792]: I0121 17:58:48.534936 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 21 17:58:48 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld Jan 21 17:58:48 crc kubenswrapper[4792]: [+]process-running ok Jan 21 17:58:48 crc kubenswrapper[4792]: healthz check failed Jan 21 17:58:48 crc kubenswrapper[4792]: I0121 17:58:48.535002 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 21 17:58:48 crc kubenswrapper[4792]: I0121 17:58:48.602452 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:48 crc kubenswrapper[4792]: E0121 17:58:48.602818 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:49.102790862 +0000 UTC m=+163.084754048 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:48 crc kubenswrapper[4792]: I0121 17:58:48.603279 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:48 crc kubenswrapper[4792]: E0121 17:58:48.603687 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:49.103677564 +0000 UTC m=+163.085640750 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:48 crc kubenswrapper[4792]: I0121 17:58:48.704791 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:48 crc kubenswrapper[4792]: E0121 17:58:48.706019 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:49.206002648 +0000 UTC m=+163.187965834 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:48 crc kubenswrapper[4792]: I0121 17:58:48.807143 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:48 crc kubenswrapper[4792]: E0121 17:58:48.807510 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:49.307495013 +0000 UTC m=+163.289458199 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:48 crc kubenswrapper[4792]: I0121 17:58:48.908042 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:48 crc kubenswrapper[4792]: E0121 17:58:48.908510 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:49.408488155 +0000 UTC m=+163.390451351 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:48 crc kubenswrapper[4792]: I0121 17:58:48.957800 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d869q" event={"ID":"bdbd468f-304e-429c-9957-e60c6e756db4","Type":"ContainerStarted","Data":"3459b6d660588be34d6e9632c331eaca30fe232ef04499117f9db22d5769f330"} Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.015810 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:49 crc kubenswrapper[4792]: E0121 17:58:49.016375 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:49.516354457 +0000 UTC m=+163.498317653 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.033159 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22hnh" event={"ID":"f1f83bd3-b657-494d-86b4-0981418d9c8d","Type":"ContainerStarted","Data":"b8f67114ba06ac5c05ab2297e89bf5064060b5160c0be4ce735e22bbd043edce"} Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.059995 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ft2zn" event={"ID":"c705c522-8d1a-4f0e-9d7a-671489591029","Type":"ContainerStarted","Data":"c0ebf21af409f7564c07a93d1484ed8b4d6e8ffda9e3d732e46adb124642c5ec"} Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.060683 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ft2zn" Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.080717 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-6nxvx" Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.126308 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d869q" podStartSLOduration=142.126291942 podStartE2EDuration="2m22.126291942s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:49.017213319 +0000 UTC m=+162.999176505" watchObservedRunningTime="2026-01-21 17:58:49.126291942 +0000 UTC m=+163.108255128" Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.127016 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.128089 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22hnh" podStartSLOduration=142.128075656 podStartE2EDuration="2m22.128075656s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:49.125946903 +0000 UTC m=+163.107910089" watchObservedRunningTime="2026-01-21 17:58:49.128075656 +0000 UTC m=+163.110038842" Jan 21 17:58:49 crc kubenswrapper[4792]: E0121 17:58:49.128866 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:49.628815104 +0000 UTC m=+163.610778290 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.179935 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-6nxvx" podStartSLOduration=14.179909946 podStartE2EDuration="14.179909946s" podCreationTimestamp="2026-01-21 17:58:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:49.165071106 +0000 UTC m=+163.147034312" watchObservedRunningTime="2026-01-21 17:58:49.179909946 +0000 UTC m=+163.161873142" Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.185720 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-mrvfm" event={"ID":"3d05043f-32a0-4b55-ac55-3f48b3c25bc5","Type":"ContainerStarted","Data":"ffd1419994c1a7c991c5c8f6b625a63696c498808f1dc0d28436a3180cdf340a"} Jan 21 17:58:49 crc kubenswrapper[4792]: E0121 17:58:49.254504 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:49.75446653 +0000 UTC m=+163.736429716 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.260476 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.287951 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ft2zn" podStartSLOduration=142.287929422 podStartE2EDuration="2m22.287929422s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:49.286703971 +0000 UTC m=+163.268667167" watchObservedRunningTime="2026-01-21 17:58:49.287929422 +0000 UTC m=+163.269892618" Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.294779 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5jx5t" Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.319582 4792 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-5jx5t container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.27:5443/healthz\": dial tcp 10.217.0.27:5443: connect: connection refused" start-of-body= Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.319646 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5jx5t" podUID="68af26fe-1f4b-4a2b-9ee2-4a7e4b1499d5" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.27:5443/healthz\": dial tcp 10.217.0.27:5443: connect: connection refused" Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.319964 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.320003 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nm5x9" Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.320019 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-xpn4x" Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.324388 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.324481 4792 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.330579 4792 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-nm5x9 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.330655 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nm5x9" podUID="4352f184-e8fe-408e-b28b-db2cd474622f" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.330802 4792 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-bxqhq container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.20:6443/healthz\": dial tcp 10.217.0.20:6443: connect: connection refused" start-of-body= Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.330824 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" podUID="7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.20:6443/healthz\": dial tcp 10.217.0.20:6443: connect: connection refused" Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.356056 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-mrvfm" podStartSLOduration=142.356036466 podStartE2EDuration="2m22.356036466s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:49.354472588 +0000 UTC m=+163.336435774" watchObservedRunningTime="2026-01-21 17:58:49.356036466 +0000 UTC m=+163.337999662" Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.362971 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:49 crc kubenswrapper[4792]: E0121 17:58:49.365395 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:49.865364038 +0000 UTC m=+163.847327224 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.400164 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-jt4rr" podStartSLOduration=142.400144683 podStartE2EDuration="2m22.400144683s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:49.400099242 +0000 UTC m=+163.382062438" watchObservedRunningTime="2026-01-21 17:58:49.400144683 +0000 UTC m=+163.382107869" Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.460503 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" podStartSLOduration=143.460487324 podStartE2EDuration="2m23.460487324s" podCreationTimestamp="2026-01-21 17:56:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:49.457296934 +0000 UTC m=+163.439260120" watchObservedRunningTime="2026-01-21 17:58:49.460487324 +0000 UTC m=+163.442450510" Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.469658 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:49 crc kubenswrapper[4792]: E0121 17:58:49.470100 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:49.970078932 +0000 UTC m=+163.952042118 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.485556 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nm5x9" podStartSLOduration=142.485535007 podStartE2EDuration="2m22.485535007s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:49.484639964 +0000 UTC m=+163.466603150" watchObservedRunningTime="2026-01-21 17:58:49.485535007 +0000 UTC m=+163.467498193"
Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.517136 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-srb5s" podStartSLOduration=143.517110192 podStartE2EDuration="2m23.517110192s" podCreationTimestamp="2026-01-21 17:56:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:49.514058386 +0000 UTC m=+163.496021572" watchObservedRunningTime="2026-01-21 17:58:49.517110192 +0000 UTC m=+163.499073378"
Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.535081 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:58:49 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld
Jan 21 17:58:49 crc kubenswrapper[4792]: [+]process-running ok
Jan 21 17:58:49 crc kubenswrapper[4792]: healthz check failed
Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.535171 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.536654 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lctf8"
Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.556828 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4nscl" podStartSLOduration=142.556804949 podStartE2EDuration="2m22.556804949s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:49.540628966 +0000 UTC m=+163.522592162" watchObservedRunningTime="2026-01-21 17:58:49.556804949 +0000 UTC m=+163.538768135"
Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.558763 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2zfjc" podStartSLOduration=142.558753947 podStartE2EDuration="2m22.558753947s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:49.557208769 +0000 UTC m=+163.539171985" watchObservedRunningTime="2026-01-21 17:58:49.558753947 +0000 UTC m=+163.540717153"
Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.604608 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:49 crc kubenswrapper[4792]: E0121 17:58:49.604818 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:50.104786972 +0000 UTC m=+164.086750168 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.604895 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:49 crc kubenswrapper[4792]: E0121 17:58:49.605161 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:50.105151932 +0000 UTC m=+164.087115118 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.617037 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5jx5t" podStartSLOduration=142.617013127 podStartE2EDuration="2m22.617013127s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:49.612000822 +0000 UTC m=+163.593964018" watchObservedRunningTime="2026-01-21 17:58:49.617013127 +0000 UTC m=+163.598976323"
Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.669079 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-p8ffl" podStartSLOduration=142.669059321 podStartE2EDuration="2m22.669059321s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:49.651465004 +0000 UTC m=+163.633428190" watchObservedRunningTime="2026-01-21 17:58:49.669059321 +0000 UTC m=+163.651022507"
Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.685058 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" podStartSLOduration=142.685038349 podStartE2EDuration="2m22.685038349s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:49.670580949 +0000 UTC m=+163.652544145" watchObservedRunningTime="2026-01-21 17:58:49.685038349 +0000 UTC m=+163.667001535"
Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.688426 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-w784k" podStartSLOduration=14.688408272 podStartE2EDuration="14.688408272s" podCreationTimestamp="2026-01-21 17:58:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:49.686104415 +0000 UTC m=+163.668067601" watchObservedRunningTime="2026-01-21 17:58:49.688408272 +0000 UTC m=+163.670371458"
Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.706143 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:49 crc kubenswrapper[4792]: E0121 17:58:49.707305 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:50.207282181 +0000 UTC m=+164.189245387 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.727105 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lctf8" podStartSLOduration=142.727083515 podStartE2EDuration="2m22.727083515s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:49.723060154 +0000 UTC m=+163.705023360" watchObservedRunningTime="2026-01-21 17:58:49.727083515 +0000 UTC m=+163.709046711"
Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.750383 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-r2mgl" podStartSLOduration=142.750360653 podStartE2EDuration="2m22.750360653s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:49.749064011 +0000 UTC m=+163.731027217" watchObservedRunningTime="2026-01-21 17:58:49.750360653 +0000 UTC m=+163.732323849"
Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.797382 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-xpn4x" podStartSLOduration=142.797357002 podStartE2EDuration="2m22.797357002s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:49.795056614 +0000 UTC m=+163.777019810" watchObservedRunningTime="2026-01-21 17:58:49.797357002 +0000 UTC m=+163.779320198"
Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.808201 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:49 crc kubenswrapper[4792]: E0121 17:58:49.808539 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:50.30852456 +0000 UTC m=+164.290487746 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.833777 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-kzpjg" podStartSLOduration=142.833756547 podStartE2EDuration="2m22.833756547s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:49.831356987 +0000 UTC m=+163.813320193" watchObservedRunningTime="2026-01-21 17:58:49.833756547 +0000 UTC m=+163.815719733"
Jan 21 17:58:49 crc kubenswrapper[4792]: I0121 17:58:49.909321 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:49 crc kubenswrapper[4792]: E0121 17:58:49.909733 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:50.409718036 +0000 UTC m=+164.391681222 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.010958 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:50 crc kubenswrapper[4792]: E0121 17:58:50.011409 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:50.511389245 +0000 UTC m=+164.493352431 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.112046 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:50 crc kubenswrapper[4792]: E0121 17:58:50.112365 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:50.612323065 +0000 UTC m=+164.594286251 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.112463 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:50 crc kubenswrapper[4792]: E0121 17:58:50.113100 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:50.613070403 +0000 UTC m=+164.595033589 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.214450 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:50 crc kubenswrapper[4792]: E0121 17:58:50.214726 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:50.714707762 +0000 UTC m=+164.696670948 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.313137 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" event={"ID":"e668a4f5-eb70-407c-91de-67acaaa1e03a","Type":"ContainerStarted","Data":"b3c0ea8aae8d92c17176f61000a77db7d2d7b0693673d76739aad241c0818549"}
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.313214 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" event={"ID":"e668a4f5-eb70-407c-91de-67acaaa1e03a","Type":"ContainerStarted","Data":"2e0b582b58943f398f471d3ea93c14944ae7e353e4e8b69aa60264fb27949806"}
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.317479 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7v9hk" event={"ID":"a4386205-1780-4ef2-a790-edab679ee8a7","Type":"ContainerStarted","Data":"fffce872e20556361b099044d81d0ee4c351449725cf3355235cdfca251a772e"}
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.319520 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-p8ffl" event={"ID":"72615ab0-4d57-4849-89f9-3f660e97825b","Type":"ContainerStarted","Data":"72634499acedd70ec8c6fc34b4b50227e1c2c9238df288a308ec11e712a449c8"}
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.323069 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" event={"ID":"7bf30f38-58cd-46e3-b3af-4f2c47124d2a","Type":"ContainerStarted","Data":"29f83735a55c6941b22f481c1c6ae72884480e74e83ee594135d988e0b1d4e5c"}
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.325474 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f659j" event={"ID":"526cf46b-9c0e-4ea2-850f-e9fba77a9449","Type":"ContainerStarted","Data":"ee7b48bd3b47cbd9a8e6d8dd70119b26dbf090b531e61633a9365ed2a8034286"}
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.325675 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:50 crc kubenswrapper[4792]: E0121 17:58:50.326268 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:50.826249066 +0000 UTC m=+164.808212262 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.327687 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-6nxvx" event={"ID":"a0ea4efe-8d03-4090-b537-344474f2cdac","Type":"ContainerStarted","Data":"1f4034c795847970bd779f93f8c1c26a45f07d2579348fc5edff8d818d512cbc"}
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.330039 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" event={"ID":"aa38141d-14db-436f-b9f7-9270a301ef1e","Type":"ContainerStarted","Data":"1c8729252378f8d4af5e45a42a3d00477c78bacca68a1ff7c3a700523a7a77c1"}
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.335825 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body=
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.335917 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused"
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.409180 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" podStartSLOduration=144.409147518 podStartE2EDuration="2m24.409147518s" podCreationTimestamp="2026-01-21 17:56:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:50.407511057 +0000 UTC m=+164.389474253" watchObservedRunningTime="2026-01-21 17:58:50.409147518 +0000 UTC m=+164.391110704"
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.428236 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:50 crc kubenswrapper[4792]: E0121 17:58:50.430789 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:50.930763035 +0000 UTC m=+164.912726231 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.482038 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nm5x9"
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.488181 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" podStartSLOduration=143.488153872 podStartE2EDuration="2m23.488153872s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:50.4716064 +0000 UTC m=+164.453569576" watchObservedRunningTime="2026-01-21 17:58:50.488153872 +0000 UTC m=+164.470117058"
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.520431 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f659j" podStartSLOduration=143.520410304 podStartE2EDuration="2m23.520410304s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:50.498969092 +0000 UTC m=+164.480932298" watchObservedRunningTime="2026-01-21 17:58:50.520410304 +0000 UTC m=+164.502373490"
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.541027 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:50 crc kubenswrapper[4792]: E0121 17:58:50.541361 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:51.041346015 +0000 UTC m=+165.023309201 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.553258 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7v9hk" podStartSLOduration=143.553241632 podStartE2EDuration="2m23.553241632s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:50.552746439 +0000 UTC m=+164.534709625" watchObservedRunningTime="2026-01-21 17:58:50.553241632 +0000 UTC m=+164.535204808"
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.559229 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:58:50 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld
Jan 21 17:58:50 crc kubenswrapper[4792]: [+]process-running ok
Jan 21 17:58:50 crc kubenswrapper[4792]: healthz check failed
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.559305 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.642131 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:50 crc kubenswrapper[4792]: E0121 17:58:50.642301 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:51.142274205 +0000 UTC m=+165.124237391 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.642407 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:50 crc kubenswrapper[4792]: E0121 17:58:50.642709 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:51.142697266 +0000 UTC m=+165.124660442 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.877696 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:50 crc kubenswrapper[4792]: E0121 17:58:50.878152 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:51.378118701 +0000 UTC m=+165.360081897 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:50 crc kubenswrapper[4792]: I0121 17:58:50.980511 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:50 crc kubenswrapper[4792]: E0121 17:58:50.980840 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:51.480827746 +0000 UTC m=+165.462790932 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:51 crc kubenswrapper[4792]: I0121 17:58:51.081430 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:51 crc kubenswrapper[4792]: E0121 17:58:51.081637 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:51.581610452 +0000 UTC m=+165.563573648 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:51 crc kubenswrapper[4792]: I0121 17:58:51.081676 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs\") pod \"network-metrics-daemon-ddsqn\" (UID: \"97b1a1f0-3533-44d9-8c10-9feb31d988ea\") " pod="openshift-multus/network-metrics-daemon-ddsqn"
Jan 21 17:58:51 crc kubenswrapper[4792]: I0121 17:58:51.081726 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:51 crc kubenswrapper[4792]: E0121 17:58:51.082055 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:51.582043332 +0000 UTC m=+165.564006518 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:51 crc kubenswrapper[4792]: I0121 17:58:51.090822 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/97b1a1f0-3533-44d9-8c10-9feb31d988ea-metrics-certs\") pod \"network-metrics-daemon-ddsqn\" (UID: \"97b1a1f0-3533-44d9-8c10-9feb31d988ea\") " pod="openshift-multus/network-metrics-daemon-ddsqn"
Jan 21 17:58:51 crc kubenswrapper[4792]: I0121 17:58:51.180028 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lctf8"
Jan 21 17:58:51 crc kubenswrapper[4792]: I0121 17:58:51.182516 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:51 crc kubenswrapper[4792]: E0121 17:58:51.182746 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:51.682714786 +0000 UTC m=+165.664677992 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:51 crc kubenswrapper[4792]: I0121 17:58:51.182820 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:51 crc kubenswrapper[4792]: E0121 17:58:51.183206 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:51.683194259 +0000 UTC m=+165.665157455 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:51 crc kubenswrapper[4792]: I0121 17:58:51.185695 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5jx5t"
Jan 21 17:58:51 crc kubenswrapper[4792]: I0121 17:58:51.284548 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:51 crc kubenswrapper[4792]: E0121 17:58:51.284710 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:51.784683723 +0000 UTC m=+165.766646909 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:51 crc kubenswrapper[4792]: I0121 17:58:51.284917 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:51 crc kubenswrapper[4792]: E0121 17:58:51.285249 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:51.785238176 +0000 UTC m=+165.767201372 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:51 crc kubenswrapper[4792]: I0121 17:58:51.332290 4792 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-bxqhq container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.20:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Jan 21 17:58:51 crc kubenswrapper[4792]: I0121 17:58:51.332361 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" podUID="7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.20:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Jan 21 17:58:51 crc kubenswrapper[4792]: I0121 17:58:51.370385 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsqn"
Jan 21 17:58:51 crc kubenswrapper[4792]: I0121 17:58:51.386678 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:51 crc kubenswrapper[4792]: E0121 17:58:51.387686 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:51.887662314 +0000 UTC m=+165.869625500 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:51 crc kubenswrapper[4792]: I0121 17:58:51.489068 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:51 crc kubenswrapper[4792]: E0121 17:58:51.489991 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:51.989975019 +0000 UTC m=+165.971938205 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:51 crc kubenswrapper[4792]: I0121 17:58:51.528439 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:58:51 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld
Jan 21 17:58:51 crc kubenswrapper[4792]: [+]process-running ok
Jan 21 17:58:51 crc kubenswrapper[4792]: healthz check failed
Jan 21 17:58:51 crc kubenswrapper[4792]: I0121 17:58:51.528504 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:58:51 crc kubenswrapper[4792]: I0121 17:58:51.593716 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:51 crc kubenswrapper[4792]: E0121 17:58:51.594279 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:52.094256282 +0000 UTC m=+166.076219468 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:51 crc kubenswrapper[4792]: I0121 17:58:51.695255 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:51 crc kubenswrapper[4792]: E0121 17:58:51.695836 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:52.195808338 +0000 UTC m=+166.177771524 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:51 crc kubenswrapper[4792]: I0121 17:58:51.797524 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:51 crc kubenswrapper[4792]: E0121 17:58:51.797955 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:52.297930658 +0000 UTC m=+166.279893844 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:51 crc kubenswrapper[4792]: I0121 17:58:51.909957 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:51 crc kubenswrapper[4792]: E0121 17:58:51.910305 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:52.410289682 +0000 UTC m=+166.392252868 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.011352 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:52 crc kubenswrapper[4792]: E0121 17:58:52.011607 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:52.511565451 +0000 UTC m=+166.493528637 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.011724 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:52 crc kubenswrapper[4792]: E0121 17:58:52.012255 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:52.512236948 +0000 UTC m=+166.494200134 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.113005 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:58:52 crc kubenswrapper[4792]: E0121 17:58:52.113375 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:52.613355753 +0000 UTC m=+166.595318939 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.152030 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-c575g"]
Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.153370 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c575g"
Need to start a new one" pod="openshift-marketplace/community-operators-c575g" Jan 21 17:58:52 crc kubenswrapper[4792]: W0121 17:58:52.175361 4792 reflector.go:561] object-"openshift-marketplace"/"community-operators-dockercfg-dmngl": failed to list *v1.Secret: secrets "community-operators-dockercfg-dmngl" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-marketplace": no relationship found between node 'crc' and this object Jan 21 17:58:52 crc kubenswrapper[4792]: E0121 17:58:52.175419 4792 reflector.go:158] "Unhandled Error" err="object-\"openshift-marketplace\"/\"community-operators-dockercfg-dmngl\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"community-operators-dockercfg-dmngl\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-marketplace\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.211248 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4kppq"] Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.212818 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4kppq" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.214289 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1018828e-7a21-4a36-83a9-c87d6aaa38c3-utilities\") pod \"community-operators-c575g\" (UID: \"1018828e-7a21-4a36-83a9-c87d6aaa38c3\") " pod="openshift-marketplace/community-operators-c575g" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.214331 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1018828e-7a21-4a36-83a9-c87d6aaa38c3-catalog-content\") pod \"community-operators-c575g\" (UID: \"1018828e-7a21-4a36-83a9-c87d6aaa38c3\") " pod="openshift-marketplace/community-operators-c575g" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.214363 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkmrw\" (UniqueName: \"kubernetes.io/projected/1018828e-7a21-4a36-83a9-c87d6aaa38c3-kube-api-access-zkmrw\") pod \"community-operators-c575g\" (UID: \"1018828e-7a21-4a36-83a9-c87d6aaa38c3\") " pod="openshift-marketplace/community-operators-c575g" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.214389 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:52 crc kubenswrapper[4792]: E0121 17:58:52.214644 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:52.714632051 +0000 UTC m=+166.696595237 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.218160 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.285817 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.286437 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c575g"] Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.286462 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4kppq"] Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.286476 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5bd5w"] Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.287297 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.287382 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5bd5w" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.293948 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5bd5w"] Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.294211 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.301588 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.301738 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.317133 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.317365 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1018828e-7a21-4a36-83a9-c87d6aaa38c3-catalog-content\") pod \"community-operators-c575g\" (UID: \"1018828e-7a21-4a36-83a9-c87d6aaa38c3\") " pod="openshift-marketplace/community-operators-c575g" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.317411 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd-utilities\") pod \"certified-operators-4kppq\" (UID: \"bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd\") " pod="openshift-marketplace/certified-operators-4kppq" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.317438 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkmrw\" (UniqueName: \"kubernetes.io/projected/1018828e-7a21-4a36-83a9-c87d6aaa38c3-kube-api-access-zkmrw\") pod \"community-operators-c575g\" (UID: \"1018828e-7a21-4a36-83a9-c87d6aaa38c3\") " pod="openshift-marketplace/community-operators-c575g" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.317474 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c18e5f7c-5ebf-4166-82ab-c29a81232623-catalog-content\") pod \"community-operators-5bd5w\" (UID: \"c18e5f7c-5ebf-4166-82ab-c29a81232623\") " pod="openshift-marketplace/community-operators-5bd5w" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.317512 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1018828e-7a21-4a36-83a9-c87d6aaa38c3-utilities\") pod \"community-operators-c575g\" (UID: \"1018828e-7a21-4a36-83a9-c87d6aaa38c3\") " pod="openshift-marketplace/community-operators-c575g" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.317532 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqtg2\" (UniqueName: \"kubernetes.io/projected/bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd-kube-api-access-zqtg2\") pod \"certified-operators-4kppq\" (UID: \"bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd\") " pod="openshift-marketplace/certified-operators-4kppq" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.317552 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/011ef36b-6682-4478-aed2-78475fbf0728-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"011ef36b-6682-4478-aed2-78475fbf0728\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.317926 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/011ef36b-6682-4478-aed2-78475fbf0728-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"011ef36b-6682-4478-aed2-78475fbf0728\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.317955 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c18e5f7c-5ebf-4166-82ab-c29a81232623-utilities\") pod \"community-operators-5bd5w\" (UID: \"c18e5f7c-5ebf-4166-82ab-c29a81232623\") " pod="openshift-marketplace/community-operators-5bd5w" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.317977 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8l5j\" (UniqueName: \"kubernetes.io/projected/c18e5f7c-5ebf-4166-82ab-c29a81232623-kube-api-access-h8l5j\") pod \"community-operators-5bd5w\" (UID: \"c18e5f7c-5ebf-4166-82ab-c29a81232623\") " pod="openshift-marketplace/community-operators-5bd5w" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.317994 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd-catalog-content\") pod \"certified-operators-4kppq\" (UID: \"bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd\") " pod="openshift-marketplace/certified-operators-4kppq" Jan 21 17:58:52 crc kubenswrapper[4792]: E0121 17:58:52.318117 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:52.818096365 +0000 UTC m=+166.800059551 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.318455 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1018828e-7a21-4a36-83a9-c87d6aaa38c3-catalog-content\") pod \"community-operators-c575g\" (UID: \"1018828e-7a21-4a36-83a9-c87d6aaa38c3\") " pod="openshift-marketplace/community-operators-c575g" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.318917 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1018828e-7a21-4a36-83a9-c87d6aaa38c3-utilities\") pod \"community-operators-c575g\" (UID: \"1018828e-7a21-4a36-83a9-c87d6aaa38c3\") " pod="openshift-marketplace/community-operators-c575g" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.451126 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkmrw\" (UniqueName: \"kubernetes.io/projected/1018828e-7a21-4a36-83a9-c87d6aaa38c3-kube-api-access-zkmrw\") pod \"community-operators-c575g\" (UID: \"1018828e-7a21-4a36-83a9-c87d6aaa38c3\") " pod="openshift-marketplace/community-operators-c575g" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.463177 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-drcjx"] Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.477752 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-drcjx" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.480589 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-drcjx"] Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.487082 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/011ef36b-6682-4478-aed2-78475fbf0728-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"011ef36b-6682-4478-aed2-78475fbf0728\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.487147 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/011ef36b-6682-4478-aed2-78475fbf0728-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"011ef36b-6682-4478-aed2-78475fbf0728\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.487182 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c18e5f7c-5ebf-4166-82ab-c29a81232623-utilities\") pod \"community-operators-5bd5w\" (UID: \"c18e5f7c-5ebf-4166-82ab-c29a81232623\") " pod="openshift-marketplace/community-operators-5bd5w" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.487220 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8l5j\" (UniqueName: \"kubernetes.io/projected/c18e5f7c-5ebf-4166-82ab-c29a81232623-kube-api-access-h8l5j\") pod \"community-operators-5bd5w\" (UID: \"c18e5f7c-5ebf-4166-82ab-c29a81232623\") " pod="openshift-marketplace/community-operators-5bd5w" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.487253 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd-catalog-content\") pod \"certified-operators-4kppq\" (UID: \"bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd\") " pod="openshift-marketplace/certified-operators-4kppq" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.487307 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd-utilities\") pod \"certified-operators-4kppq\" (UID: \"bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd\") " pod="openshift-marketplace/certified-operators-4kppq" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.487354 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.487389 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c18e5f7c-5ebf-4166-82ab-c29a81232623-catalog-content\") pod \"community-operators-5bd5w\" (UID: \"c18e5f7c-5ebf-4166-82ab-c29a81232623\") " pod="openshift-marketplace/community-operators-5bd5w" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.487449 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqtg2\" (UniqueName: \"kubernetes.io/projected/bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd-kube-api-access-zqtg2\") pod \"certified-operators-4kppq\" (UID: \"bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd\") " pod="openshift-marketplace/certified-operators-4kppq" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.488184 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/011ef36b-6682-4478-aed2-78475fbf0728-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"011ef36b-6682-4478-aed2-78475fbf0728\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.488757 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c18e5f7c-5ebf-4166-82ab-c29a81232623-utilities\") pod \"community-operators-5bd5w\" (UID: \"c18e5f7c-5ebf-4166-82ab-c29a81232623\") " pod="openshift-marketplace/community-operators-5bd5w" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.491645 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd-utilities\") pod \"certified-operators-4kppq\" (UID: \"bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd\") " pod="openshift-marketplace/certified-operators-4kppq" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.492093 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd-catalog-content\") pod \"certified-operators-4kppq\" (UID: \"bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd\") " pod="openshift-marketplace/certified-operators-4kppq" Jan 21 17:58:52 crc kubenswrapper[4792]: E0121 17:58:52.492253 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:52.992231276 +0000 UTC m=+166.974194462 (durationBeforeRetry 500ms). 
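
Note: the VerifyControllerAttachedVolume / MountVolume / "MountVolume.SetUp succeeded" progression for the catalog pods is the volume manager's reconciler walking each pod's volumes from desired state to actual state. emptyDir and projected volumes succeed on the first pass; the CSI-backed PVC keeps reappearing because its mount cannot complete. A heavily simplified sketch of that pass (the real reconciler also tracks attach state, device mounts, and SELinux contexts):

    // Skeleton of the desired-state vs. actual-state reconcile pass that
    // emits the MountVolume lines above. Heavily simplified and illustrative.
    package main

    import "fmt"

    type volume struct {
        name    string
        mounted bool
        mount   func() error // plugin-specific SetUp
    }

    func reconcile(desired []*volume) {
        for _, v := range desired {
            if v.mounted {
                continue // actual state already matches desired state
            }
            fmt.Printf("operationExecutor.MountVolume started for volume %q\n", v.name)
            if err := v.mount(); err != nil {
                fmt.Printf("MountVolume failed for %q: %v (will retry)\n", v.name, err)
                continue
            }
            v.mounted = true
            fmt.Printf("MountVolume.SetUp succeeded for volume %q\n", v.name)
        }
    }

    func main() {
        vols := []*volume{
            {name: "utilities", mount: func() error { return nil }}, // emptyDir: trivial
            {name: "pvc-657094db", mount: func() error { // CSI: fails until the driver registers
                return fmt.Errorf("driver not registered")
            }},
        }
        reconcile(vols) // the kubelet repeats this pass on a short loop
    }
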
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.492462 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c18e5f7c-5ebf-4166-82ab-c29a81232623-catalog-content\") pod \"community-operators-5bd5w\" (UID: \"c18e5f7c-5ebf-4166-82ab-c29a81232623\") " pod="openshift-marketplace/community-operators-5bd5w" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.562781 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 21 17:58:52 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld Jan 21 17:58:52 crc kubenswrapper[4792]: [+]process-running ok Jan 21 17:58:52 crc kubenswrapper[4792]: healthz check failed Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.562833 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.590777 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.591382 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfvzm\" (UniqueName: \"kubernetes.io/projected/cc09a117-7791-479f-8477-46cd94afcc09-kube-api-access-wfvzm\") pod \"certified-operators-drcjx\" (UID: \"cc09a117-7791-479f-8477-46cd94afcc09\") " pod="openshift-marketplace/certified-operators-drcjx" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.591417 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc09a117-7791-479f-8477-46cd94afcc09-catalog-content\") pod \"certified-operators-drcjx\" (UID: \"cc09a117-7791-479f-8477-46cd94afcc09\") " pod="openshift-marketplace/certified-operators-drcjx" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.591444 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc09a117-7791-479f-8477-46cd94afcc09-utilities\") pod \"certified-operators-drcjx\" (UID: \"cc09a117-7791-479f-8477-46cd94afcc09\") " pod="openshift-marketplace/certified-operators-drcjx" Jan 21 17:58:52 crc kubenswrapper[4792]: E0121 17:58:52.591835 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:53.091810213 +0000 UTC m=+167.073773399 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.672828 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqtg2\" (UniqueName: \"kubernetes.io/projected/bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd-kube-api-access-zqtg2\") pod \"certified-operators-4kppq\" (UID: \"bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd\") " pod="openshift-marketplace/certified-operators-4kppq" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.673172 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/011ef36b-6682-4478-aed2-78475fbf0728-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"011ef36b-6682-4478-aed2-78475fbf0728\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.692835 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc09a117-7791-479f-8477-46cd94afcc09-catalog-content\") pod \"certified-operators-drcjx\" (UID: \"cc09a117-7791-479f-8477-46cd94afcc09\") " pod="openshift-marketplace/certified-operators-drcjx" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.692956 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc09a117-7791-479f-8477-46cd94afcc09-utilities\") pod \"certified-operators-drcjx\" (UID: \"cc09a117-7791-479f-8477-46cd94afcc09\") " pod="openshift-marketplace/certified-operators-drcjx" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.693017 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.693090 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfvzm\" (UniqueName: \"kubernetes.io/projected/cc09a117-7791-479f-8477-46cd94afcc09-kube-api-access-wfvzm\") pod \"certified-operators-drcjx\" (UID: \"cc09a117-7791-479f-8477-46cd94afcc09\") " pod="openshift-marketplace/certified-operators-drcjx" Jan 21 17:58:52 crc kubenswrapper[4792]: E0121 17:58:52.695001 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:53.194976578 +0000 UTC m=+167.176939764 (durationBeforeRetry 500ms). 
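
Note: the router probe output quoted above is the standard aggregated healthz format: each registered check contributes a [+] or [-] line, reasons are withheld on the unauthenticated endpoint, and the handler returns 500 if any check fails -- exactly the "statuscode: 500" the startup probe reports. A small reproduction; the check names are taken from the log, the handler logic is an assumed simplification:

    // Minimal reproduction of the aggregated healthz output quoted above:
    // per-check [+]/[-] lines and an overall 500 when any check fails.
    package main

    import (
        "fmt"
        "io"
        "net/http"
        "net/http/httptest"
    )

    type check struct {
        name string
        fn   func() error
    }

    func healthz(checks []check) http.HandlerFunc {
        return func(w http.ResponseWriter, r *http.Request) {
            out, failed := "", false
            for _, c := range checks {
                if err := c.fn(); err != nil {
                    out += fmt.Sprintf("[-]%s failed: reason withheld\n", c.name)
                    failed = true
                } else {
                    out += fmt.Sprintf("[+]%s ok\n", c.name)
                }
            }
            if failed {
                w.WriteHeader(http.StatusInternalServerError) // probe sees statuscode: 500
                io.WriteString(w, out+"healthz check failed\n")
                return
            }
            io.WriteString(w, out+"ok\n")
        }
    }

    func main() {
        h := healthz([]check{
            {"backend-http", func() error { return fmt.Errorf("no backends") }},
            {"has-synced", func() error { return fmt.Errorf("not synced") }},
            {"process-running", func() error { return nil }},
        })
        srv := httptest.NewServer(h)
        defer srv.Close()
        resp, _ := http.Get(srv.URL)
        body, _ := io.ReadAll(resp.Body)
        fmt.Print(resp.StatusCode, "\n", string(body))
    }
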
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.695057 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc09a117-7791-479f-8477-46cd94afcc09-catalog-content\") pod \"certified-operators-drcjx\" (UID: \"cc09a117-7791-479f-8477-46cd94afcc09\") " pod="openshift-marketplace/certified-operators-drcjx" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.695224 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc09a117-7791-479f-8477-46cd94afcc09-utilities\") pod \"certified-operators-drcjx\" (UID: \"cc09a117-7791-479f-8477-46cd94afcc09\") " pod="openshift-marketplace/certified-operators-drcjx" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.748509 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8l5j\" (UniqueName: \"kubernetes.io/projected/c18e5f7c-5ebf-4166-82ab-c29a81232623-kube-api-access-h8l5j\") pod \"community-operators-5bd5w\" (UID: \"c18e5f7c-5ebf-4166-82ab-c29a81232623\") " pod="openshift-marketplace/community-operators-5bd5w" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.798231 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:52 crc kubenswrapper[4792]: E0121 17:58:52.798793 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:53.298771169 +0000 UTC m=+167.280734355 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.831753 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wfvzm\" (UniqueName: \"kubernetes.io/projected/cc09a117-7791-479f-8477-46cd94afcc09-kube-api-access-wfvzm\") pod \"certified-operators-drcjx\" (UID: \"cc09a117-7791-479f-8477-46cd94afcc09\") " pod="openshift-marketplace/certified-operators-drcjx" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.887081 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4kppq" Jan 21 17:58:52 crc kubenswrapper[4792]: I0121 17:58:52.968165 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:52 crc kubenswrapper[4792]: E0121 17:58:52.968495 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:53.468481701 +0000 UTC m=+167.450444887 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.029458 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.069217 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:53 crc kubenswrapper[4792]: E0121 17:58:53.069720 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:53.569690167 +0000 UTC m=+167.551653373 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.091955 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-drcjx" Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.175419 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:53 crc kubenswrapper[4792]: E0121 17:58:53.175762 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:53.675749835 +0000 UTC m=+167.657713021 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.234168 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.235312 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.241958 4792 patch_prober.go:28] interesting pod/apiserver-76f77b778f-q7mdj container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.12:8443/livez\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.242014 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" podUID="e668a4f5-eb70-407c-91de-67acaaa1e03a" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.12:8443/livez\": dial tcp 10.217.0.12:8443: connect: connection refused" Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.254652 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.255639 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.305492 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:53 crc kubenswrapper[4792]: E0121 17:58:53.305651 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-21 17:58:53.805624285 +0000 UTC m=+167.787587471 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.305823 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.306833 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:53 crc kubenswrapper[4792]: E0121 17:58:53.306899 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:53.806884206 +0000 UTC m=+167.788847392 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.309877 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-ddsqn"] Jan 21 17:58:53 crc kubenswrapper[4792]: W0121 17:58:53.318895 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod97b1a1f0_3533_44d9_8c10_9feb31d988ea.slice/crio-86aea7dd4372066c1081e757aa6bcb7a7613d4c495d7f8d38d1051bb69096f7a WatchSource:0}: Error finding container 86aea7dd4372066c1081e757aa6bcb7a7613d4c495d7f8d38d1051bb69096f7a: Status 404 returned error can't find the container with id 86aea7dd4372066c1081e757aa6bcb7a7613d4c495d7f8d38d1051bb69096f7a Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.415097 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:53 crc kubenswrapper[4792]: E0121 17:58:53.416157 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:53.916140733 +0000 UTC m=+167.898103919 (durationBeforeRetry 500ms). 
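
Note: the openshift-apiserver startup probe failures are a different shape from the router's. "connect: connection refused" means nothing is listening on 10.217.0.12:8443 yet, so the probe fails at the transport layer before any status code exists. The kubelet's HTTP prober counts 2xx/3xx as success and everything else, including transport errors, as failure, and while a startup probe is failing, liveness and readiness probes for that container do not run. A sketch of that decision, simplified from the kubelet's HTTP probe package:

    // Kubelet-style HTTP probe decision: 2xx/3xx is success, anything else
    // (including "connection refused") is a failure. Assumed simplification.
    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    func httpProbe(url string) (result string, output string) {
        client := &http.Client{Timeout: time.Second}
        resp, err := client.Get(url)
        if err != nil {
            // Transport-level failure, e.g. nothing listening yet.
            return "failure", fmt.Sprintf("Get %q: %v", url, err)
        }
        defer resp.Body.Close()
        if resp.StatusCode >= 200 && resp.StatusCode < 400 {
            return "success", ""
        }
        return "failure", fmt.Sprintf("HTTP probe failed with statuscode: %d", resp.StatusCode)
    }

    func main() {
        // Hypothetical dead endpoint: reproduces the "connection refused" output.
        r, out := httpProbe("http://127.0.0.1:1")
        fmt.Printf("probeResult=%q output=%q\n", r, out)
    }
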
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.500394 4792 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openshift-marketplace/community-operators-c575g" secret="" err="failed to sync secret cache: timed out waiting for the condition" Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.500488 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c575g" Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.526706 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:53 crc kubenswrapper[4792]: E0121 17:58:53.527258 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:54.027244287 +0000 UTC m=+168.009207473 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.538119 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 21 17:58:53 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld Jan 21 17:58:53 crc kubenswrapper[4792]: [+]process-running ok Jan 21 17:58:53 crc kubenswrapper[4792]: healthz check failed Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.538487 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.571986 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.572054 4792 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.580038 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" event={"ID":"97b1a1f0-3533-44d9-8c10-9feb31d988ea","Type":"ContainerStarted","Data":"86aea7dd4372066c1081e757aa6bcb7a7613d4c495d7f8d38d1051bb69096f7a"} Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.592128 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4k5zt" Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.627801 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:53 crc kubenswrapper[4792]: E0121 17:58:53.628057 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:54.127999213 +0000 UTC m=+168.109962399 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.628122 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:53 crc kubenswrapper[4792]: E0121 17:58:53.628504 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:54.128492595 +0000 UTC m=+168.110455781 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.729093 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:53 crc kubenswrapper[4792]: E0121 17:58:53.730165 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:54.230146504 +0000 UTC m=+168.212109690 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.751780 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.751988 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5bd5w" Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.770627 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-l85mb"] Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.773014 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l85mb" Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.779693 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.783394 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l85mb"] Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.805255 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.831244 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:53 crc kubenswrapper[4792]: E0121 17:58:53.831595 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:54.331579246 +0000 UTC m=+168.313542432 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.932197 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.932455 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88l7z\" (UniqueName: \"kubernetes.io/projected/e6f739f0-719a-4454-bbaf-b4b5c624f084-kube-api-access-88l7z\") pod \"redhat-marketplace-l85mb\" (UID: \"e6f739f0-719a-4454-bbaf-b4b5c624f084\") " pod="openshift-marketplace/redhat-marketplace-l85mb" Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.932505 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6f739f0-719a-4454-bbaf-b4b5c624f084-utilities\") pod \"redhat-marketplace-l85mb\" (UID: \"e6f739f0-719a-4454-bbaf-b4b5c624f084\") " pod="openshift-marketplace/redhat-marketplace-l85mb" Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.932541 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6f739f0-719a-4454-bbaf-b4b5c624f084-catalog-content\") pod \"redhat-marketplace-l85mb\" (UID: \"e6f739f0-719a-4454-bbaf-b4b5c624f084\") " pod="openshift-marketplace/redhat-marketplace-l85mb" Jan 21 17:58:53 crc 
kubenswrapper[4792]: E0121 17:58:53.932714 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:54.432696421 +0000 UTC m=+168.414659607 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:53 crc kubenswrapper[4792]: I0121 17:58:53.939421 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4kppq"] Jan 21 17:58:54 crc kubenswrapper[4792]: W0121 17:58:54.024799 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbf2f54b1_35a4_4c74_a656_68dfb3e5e2bd.slice/crio-f46744ef1a6b65e9000d0860e4e4370e7eb4cc519ea49e32cca967544d6298af WatchSource:0}: Error finding container f46744ef1a6b65e9000d0860e4e4370e7eb4cc519ea49e32cca967544d6298af: Status 404 returned error can't find the container with id f46744ef1a6b65e9000d0860e4e4370e7eb4cc519ea49e32cca967544d6298af Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.036554 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88l7z\" (UniqueName: \"kubernetes.io/projected/e6f739f0-719a-4454-bbaf-b4b5c624f084-kube-api-access-88l7z\") pod \"redhat-marketplace-l85mb\" (UID: \"e6f739f0-719a-4454-bbaf-b4b5c624f084\") " pod="openshift-marketplace/redhat-marketplace-l85mb" Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.036615 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6f739f0-719a-4454-bbaf-b4b5c624f084-utilities\") pod \"redhat-marketplace-l85mb\" (UID: \"e6f739f0-719a-4454-bbaf-b4b5c624f084\") " pod="openshift-marketplace/redhat-marketplace-l85mb" Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.036647 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6f739f0-719a-4454-bbaf-b4b5c624f084-catalog-content\") pod \"redhat-marketplace-l85mb\" (UID: \"e6f739f0-719a-4454-bbaf-b4b5c624f084\") " pod="openshift-marketplace/redhat-marketplace-l85mb" Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.036677 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:54 crc kubenswrapper[4792]: E0121 17:58:54.037035 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:54.537021656 +0000 UTC m=+168.518984842 (durationBeforeRetry 500ms). 
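
Note: the manager.go:1169 warnings ("Failed to process watch event ... Status 404 returned error can't find the container with id ...") look like a create-then-inspect race: the cgroup watch fires for a freshly created crio-<id> scope before the container is inspectable, and housekeeping picks it up on a later pass, as the subsequent ContainerStarted events show. Treating not-found as retryable rather than fatal is the natural handling; a sketch under that assumption (inspect and errNotFound are illustrative names, not kubelet API):

    // Tolerating the create-then-inspect race behind the manager.go
    // "can't find the container with id ..." warnings: treat not-found on a
    // freshly watched container as retryable, not fatal.
    package main

    import (
        "errors"
        "fmt"
        "time"
    )

    var errNotFound = errors.New("status 404: can't find the container")

    // inspect stands in for the runtime query; it fails on the first attempt
    // to model the race where the container is not yet visible.
    func inspect(id string, attempt int) error {
        if attempt == 0 {
            return errNotFound
        }
        return nil
    }

    func handleWatchEvent(id string) error {
        for attempt := 0; attempt < 3; attempt++ {
            err := inspect(id, attempt)
            if err == nil {
                return nil
            }
            if !errors.Is(err, errNotFound) {
                return err // real failure, surface it
            }
            time.Sleep(50 * time.Millisecond) // let the runtime catch up
        }
        return fmt.Errorf("container %s still missing after retries", id)
    }

    func main() {
        fmt.Println(handleWatchEvent("86aea7dd4372"))
    }
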
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.037707 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6f739f0-719a-4454-bbaf-b4b5c624f084-utilities\") pod \"redhat-marketplace-l85mb\" (UID: \"e6f739f0-719a-4454-bbaf-b4b5c624f084\") " pod="openshift-marketplace/redhat-marketplace-l85mb" Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.037977 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6f739f0-719a-4454-bbaf-b4b5c624f084-catalog-content\") pod \"redhat-marketplace-l85mb\" (UID: \"e6f739f0-719a-4454-bbaf-b4b5c624f084\") " pod="openshift-marketplace/redhat-marketplace-l85mb" Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.071104 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-drcjx"] Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.099009 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88l7z\" (UniqueName: \"kubernetes.io/projected/e6f739f0-719a-4454-bbaf-b4b5c624f084-kube-api-access-88l7z\") pod \"redhat-marketplace-l85mb\" (UID: \"e6f739f0-719a-4454-bbaf-b4b5c624f084\") " pod="openshift-marketplace/redhat-marketplace-l85mb" Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.133974 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l85mb" Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.169576 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:54 crc kubenswrapper[4792]: E0121 17:58:54.170075 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:54.670053605 +0000 UTC m=+168.652016791 (durationBeforeRetry 500ms). 
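
Note: "No sandbox for pod can be found. Need to start a new one", logged once for each newly scheduled pod in this window, is the sandbox-existence check at the top of pod sync: if the pod has no existing, ready sandbox, the kubelet must create one before any of its containers can start. A sketch of that decision (the sandbox type and readiness rule here are simplified, not the kubelet's real types):

    // Sketch of the decision logged as "No sandbox for pod can be found.
    // Need to start a new one": create a sandbox when none exists or the
    // most recent one is no longer ready.
    package main

    import "fmt"

    type sandbox struct {
        id    string
        ready bool
    }

    // needsNewSandbox mirrors the check: no sandboxes, or latest not ready.
    func needsNewSandbox(sandboxes []sandbox) bool {
        if len(sandboxes) == 0 {
            return true
        }
        return !sandboxes[len(sandboxes)-1].ready
    }

    func main() {
        pods := []struct {
            name      string
            sandboxes []sandbox
        }{
            {"openshift-marketplace/redhat-marketplace-l85mb", nil},
            {"openshift-ingress/router-default-5444994796-p5dgb", []sandbox{{"abc123", true}}},
        }
        for _, pod := range pods {
            if needsNewSandbox(pod.sandboxes) {
                fmt.Printf("No sandbox for pod can be found. Need to start a new one pod=%q\n", pod.name)
            }
        }
    }
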
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.289139 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:54 crc kubenswrapper[4792]: E0121 17:58:54.289517 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:54.789484895 +0000 UTC m=+168.771448081 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.362659 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kk56x"] Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.364013 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kk56x" Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.412565 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kk56x"] Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.413729 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:54 crc kubenswrapper[4792]: E0121 17:58:54.420909 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:54.913897129 +0000 UTC m=+168.895860315 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.421237 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dj48\" (UniqueName: \"kubernetes.io/projected/3ee97d71-6a45-4d65-9242-fe34a6d15f52-kube-api-access-7dj48\") pod \"redhat-marketplace-kk56x\" (UID: \"3ee97d71-6a45-4d65-9242-fe34a6d15f52\") " pod="openshift-marketplace/redhat-marketplace-kk56x" Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.421300 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ee97d71-6a45-4d65-9242-fe34a6d15f52-utilities\") pod \"redhat-marketplace-kk56x\" (UID: \"3ee97d71-6a45-4d65-9242-fe34a6d15f52\") " pod="openshift-marketplace/redhat-marketplace-kk56x" Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.421449 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ee97d71-6a45-4d65-9242-fe34a6d15f52-catalog-content\") pod \"redhat-marketplace-kk56x\" (UID: \"3ee97d71-6a45-4d65-9242-fe34a6d15f52\") " pod="openshift-marketplace/redhat-marketplace-kk56x" Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.421604 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:54 crc kubenswrapper[4792]: E0121 17:58:54.422929 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:54.922909873 +0000 UTC m=+168.904873049 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.529567 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.529801 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dj48\" (UniqueName: \"kubernetes.io/projected/3ee97d71-6a45-4d65-9242-fe34a6d15f52-kube-api-access-7dj48\") pod \"redhat-marketplace-kk56x\" (UID: \"3ee97d71-6a45-4d65-9242-fe34a6d15f52\") " pod="openshift-marketplace/redhat-marketplace-kk56x" Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.529830 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ee97d71-6a45-4d65-9242-fe34a6d15f52-utilities\") pod \"redhat-marketplace-kk56x\" (UID: \"3ee97d71-6a45-4d65-9242-fe34a6d15f52\") " pod="openshift-marketplace/redhat-marketplace-kk56x" Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.529895 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ee97d71-6a45-4d65-9242-fe34a6d15f52-catalog-content\") pod \"redhat-marketplace-kk56x\" (UID: \"3ee97d71-6a45-4d65-9242-fe34a6d15f52\") " pod="openshift-marketplace/redhat-marketplace-kk56x" Jan 21 17:58:54 crc kubenswrapper[4792]: E0121 17:58:54.530307 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:55.030284904 +0000 UTC m=+169.012248090 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.530389 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ee97d71-6a45-4d65-9242-fe34a6d15f52-catalog-content\") pod \"redhat-marketplace-kk56x\" (UID: \"3ee97d71-6a45-4d65-9242-fe34a6d15f52\") " pod="openshift-marketplace/redhat-marketplace-kk56x" Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.530629 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ee97d71-6a45-4d65-9242-fe34a6d15f52-utilities\") pod \"redhat-marketplace-kk56x\" (UID: \"3ee97d71-6a45-4d65-9242-fe34a6d15f52\") " pod="openshift-marketplace/redhat-marketplace-kk56x" Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.544861 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 21 17:58:54 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld Jan 21 17:58:54 crc kubenswrapper[4792]: [+]process-running ok Jan 21 17:58:54 crc kubenswrapper[4792]: healthz check failed Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.545128 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.613022 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dj48\" (UniqueName: \"kubernetes.io/projected/3ee97d71-6a45-4d65-9242-fe34a6d15f52-kube-api-access-7dj48\") pod \"redhat-marketplace-kk56x\" (UID: \"3ee97d71-6a45-4d65-9242-fe34a6d15f52\") " pod="openshift-marketplace/redhat-marketplace-kk56x" Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.618708 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-drcjx" event={"ID":"cc09a117-7791-479f-8477-46cd94afcc09","Type":"ContainerStarted","Data":"5b4bfb4d495bea0232909cfeeafb46cba51faa6f3aef967e20cc95fbbbae7999"} Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.625211 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kk56x" Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.628381 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" event={"ID":"aa38141d-14db-436f-b9f7-9270a301ef1e","Type":"ContainerStarted","Data":"ddf6cfeee1646cc452d821d0ee2e6ef00046d562ebfa959ec05f8f60fff073c0"} Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.631127 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:54 crc kubenswrapper[4792]: E0121 17:58:54.631444 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:55.131431339 +0000 UTC m=+169.113394525 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.684385 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4kppq" event={"ID":"bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd","Type":"ContainerStarted","Data":"f46744ef1a6b65e9000d0860e4e4370e7eb4cc519ea49e32cca967544d6298af"} Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.715798 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"011ef36b-6682-4478-aed2-78475fbf0728","Type":"ContainerStarted","Data":"c5bb2dc55154cb6f7eba89606f56437bb7b9116b371431e3a9259f06d654d0bb"} Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.733955 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:54 crc kubenswrapper[4792]: E0121 17:58:54.734078 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:55.234059463 +0000 UTC m=+169.216022649 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.734388 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:54 crc kubenswrapper[4792]: E0121 17:58:54.735673 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:55.235656272 +0000 UTC m=+169.217619458 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.835563 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:54 crc kubenswrapper[4792]: E0121 17:58:54.836034 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:55.336014719 +0000 UTC m=+169.317977905 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.937674 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:54 crc kubenswrapper[4792]: E0121 17:58:54.938097 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:55.438078826 +0000 UTC m=+169.420042012 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.991725 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-d4jqh"] Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.993087 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d4jqh" Jan 21 17:58:54 crc kubenswrapper[4792]: I0121 17:58:54.995129 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.003232 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-d4jqh"] Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.038977 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:55 crc kubenswrapper[4792]: E0121 17:58:55.039196 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:55.539176841 +0000 UTC m=+169.521140027 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.039266 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/106746c3-4570-4081-90ec-a6f1afd6dade-catalog-content\") pod \"redhat-operators-d4jqh\" (UID: \"106746c3-4570-4081-90ec-a6f1afd6dade\") " pod="openshift-marketplace/redhat-operators-d4jqh" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.039297 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9cvsq\" (UniqueName: \"kubernetes.io/projected/106746c3-4570-4081-90ec-a6f1afd6dade-kube-api-access-9cvsq\") pod \"redhat-operators-d4jqh\" (UID: \"106746c3-4570-4081-90ec-a6f1afd6dade\") " pod="openshift-marketplace/redhat-operators-d4jqh" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.039403 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/106746c3-4570-4081-90ec-a6f1afd6dade-utilities\") pod \"redhat-operators-d4jqh\" (UID: \"106746c3-4570-4081-90ec-a6f1afd6dade\") " pod="openshift-marketplace/redhat-operators-d4jqh" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.039687 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:55 crc kubenswrapper[4792]: E0121 17:58:55.040110 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:55.540094263 +0000 UTC m=+169.522057449 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.070258 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c575g"] Jan 21 17:58:55 crc kubenswrapper[4792]: W0121 17:58:55.096756 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1018828e_7a21_4a36_83a9_c87d6aaa38c3.slice/crio-d3a688dfcd5c3e37f59d32dc48a985ae5a757c5d4b661d68fc3e1f36ac114950 WatchSource:0}: Error finding container d3a688dfcd5c3e37f59d32dc48a985ae5a757c5d4b661d68fc3e1f36ac114950: Status 404 returned error can't find the container with id d3a688dfcd5c3e37f59d32dc48a985ae5a757c5d4b661d68fc3e1f36ac114950 Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.146235 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.146509 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/106746c3-4570-4081-90ec-a6f1afd6dade-catalog-content\") pod \"redhat-operators-d4jqh\" (UID: \"106746c3-4570-4081-90ec-a6f1afd6dade\") " pod="openshift-marketplace/redhat-operators-d4jqh" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.146543 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9cvsq\" (UniqueName: \"kubernetes.io/projected/106746c3-4570-4081-90ec-a6f1afd6dade-kube-api-access-9cvsq\") pod \"redhat-operators-d4jqh\" (UID: \"106746c3-4570-4081-90ec-a6f1afd6dade\") " pod="openshift-marketplace/redhat-operators-d4jqh" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.146566 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/106746c3-4570-4081-90ec-a6f1afd6dade-utilities\") pod \"redhat-operators-d4jqh\" (UID: \"106746c3-4570-4081-90ec-a6f1afd6dade\") " pod="openshift-marketplace/redhat-operators-d4jqh" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.147062 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/106746c3-4570-4081-90ec-a6f1afd6dade-utilities\") pod \"redhat-operators-d4jqh\" (UID: \"106746c3-4570-4081-90ec-a6f1afd6dade\") " pod="openshift-marketplace/redhat-operators-d4jqh" Jan 21 17:58:55 crc kubenswrapper[4792]: E0121 17:58:55.147147 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:55.647131456 +0000 UTC m=+169.629094642 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.147387 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/106746c3-4570-4081-90ec-a6f1afd6dade-catalog-content\") pod \"redhat-operators-d4jqh\" (UID: \"106746c3-4570-4081-90ec-a6f1afd6dade\") " pod="openshift-marketplace/redhat-operators-d4jqh" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.175189 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9cvsq\" (UniqueName: \"kubernetes.io/projected/106746c3-4570-4081-90ec-a6f1afd6dade-kube-api-access-9cvsq\") pod \"redhat-operators-d4jqh\" (UID: \"106746c3-4570-4081-90ec-a6f1afd6dade\") " pod="openshift-marketplace/redhat-operators-d4jqh" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.247752 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:55 crc kubenswrapper[4792]: E0121 17:58:55.248154 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:55.748139258 +0000 UTC m=+169.730102444 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.321682 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-d4jqh" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.329980 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kk56x"] Jan 21 17:58:55 crc kubenswrapper[4792]: W0121 17:58:55.340392 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3ee97d71_6a45_4d65_9242_fe34a6d15f52.slice/crio-83cebf25ab16a1aa2458a9dfbe045994a1b9dc99576f14ae0daa2b78033e9683 WatchSource:0}: Error finding container 83cebf25ab16a1aa2458a9dfbe045994a1b9dc99576f14ae0daa2b78033e9683: Status 404 returned error can't find the container with id 83cebf25ab16a1aa2458a9dfbe045994a1b9dc99576f14ae0daa2b78033e9683 Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.348249 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:55 crc kubenswrapper[4792]: E0121 17:58:55.348619 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:55.848606177 +0000 UTC m=+169.830569363 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.351435 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5bd5w"] Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.365050 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2rld8"] Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.368964 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2rld8" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.372782 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2rld8"] Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.468427 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.468509 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93c3d3cc-1042-4c4d-bc8c-80360b6ca310-catalog-content\") pod \"redhat-operators-2rld8\" (UID: \"93c3d3cc-1042-4c4d-bc8c-80360b6ca310\") " pod="openshift-marketplace/redhat-operators-2rld8" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.468535 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tz58k\" (UniqueName: \"kubernetes.io/projected/93c3d3cc-1042-4c4d-bc8c-80360b6ca310-kube-api-access-tz58k\") pod \"redhat-operators-2rld8\" (UID: \"93c3d3cc-1042-4c4d-bc8c-80360b6ca310\") " pod="openshift-marketplace/redhat-operators-2rld8" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.468577 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93c3d3cc-1042-4c4d-bc8c-80360b6ca310-utilities\") pod \"redhat-operators-2rld8\" (UID: \"93c3d3cc-1042-4c4d-bc8c-80360b6ca310\") " pod="openshift-marketplace/redhat-operators-2rld8" Jan 21 17:58:55 crc kubenswrapper[4792]: E0121 17:58:55.468903 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:55.968891859 +0000 UTC m=+169.950855045 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.583080 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:55 crc kubenswrapper[4792]: E0121 17:58:55.583374 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:56.083343465 +0000 UTC m=+170.065306651 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.583417 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.583483 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93c3d3cc-1042-4c4d-bc8c-80360b6ca310-catalog-content\") pod \"redhat-operators-2rld8\" (UID: \"93c3d3cc-1042-4c4d-bc8c-80360b6ca310\") " pod="openshift-marketplace/redhat-operators-2rld8" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.583510 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tz58k\" (UniqueName: \"kubernetes.io/projected/93c3d3cc-1042-4c4d-bc8c-80360b6ca310-kube-api-access-tz58k\") pod \"redhat-operators-2rld8\" (UID: \"93c3d3cc-1042-4c4d-bc8c-80360b6ca310\") " pod="openshift-marketplace/redhat-operators-2rld8" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.583540 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93c3d3cc-1042-4c4d-bc8c-80360b6ca310-utilities\") pod \"redhat-operators-2rld8\" (UID: \"93c3d3cc-1042-4c4d-bc8c-80360b6ca310\") " pod="openshift-marketplace/redhat-operators-2rld8" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.584124 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93c3d3cc-1042-4c4d-bc8c-80360b6ca310-utilities\") pod \"redhat-operators-2rld8\" (UID: \"93c3d3cc-1042-4c4d-bc8c-80360b6ca310\") " pod="openshift-marketplace/redhat-operators-2rld8" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.584409 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93c3d3cc-1042-4c4d-bc8c-80360b6ca310-catalog-content\") pod \"redhat-operators-2rld8\" (UID: \"93c3d3cc-1042-4c4d-bc8c-80360b6ca310\") " pod="openshift-marketplace/redhat-operators-2rld8" Jan 21 17:58:55 crc kubenswrapper[4792]: E0121 17:58:55.584729 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:56.084718399 +0000 UTC m=+170.066681685 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.585041 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 21 17:58:55 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld Jan 21 17:58:55 crc kubenswrapper[4792]: [+]process-running ok Jan 21 17:58:55 crc kubenswrapper[4792]: healthz check failed Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.585090 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.625761 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.631304 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.639337 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l85mb"] Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.644202 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.644490 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.661148 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.695216 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tz58k\" (UniqueName: \"kubernetes.io/projected/93c3d3cc-1042-4c4d-bc8c-80360b6ca310-kube-api-access-tz58k\") pod \"redhat-operators-2rld8\" (UID: \"93c3d3cc-1042-4c4d-bc8c-80360b6ca310\") " pod="openshift-marketplace/redhat-operators-2rld8" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.698421 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2rld8" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.705147 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:55 crc kubenswrapper[4792]: E0121 17:58:55.708467 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:56.208445247 +0000 UTC m=+170.190408423 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.708522 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fc4138a1-47dc-4954-9af5-fdd8f53588e5-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"fc4138a1-47dc-4954-9af5-fdd8f53588e5\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.708572 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fc4138a1-47dc-4954-9af5-fdd8f53588e5-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"fc4138a1-47dc-4954-9af5-fdd8f53588e5\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 21 17:58:55 crc kubenswrapper[4792]: E0121 17:58:55.713768 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:56.213746918 +0000 UTC m=+170.195710104 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.727415 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c575g" event={"ID":"1018828e-7a21-4a36-83a9-c87d6aaa38c3","Type":"ContainerStarted","Data":"d3a688dfcd5c3e37f59d32dc48a985ae5a757c5d4b661d68fc3e1f36ac114950"} Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.728693 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5bd5w" event={"ID":"c18e5f7c-5ebf-4166-82ab-c29a81232623","Type":"ContainerStarted","Data":"150a55bed7e9513c397ce4e671fabf25663c64e74ffaa84f229b17701da84c35"} Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.739089 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.789618 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l85mb" event={"ID":"e6f739f0-719a-4454-bbaf-b4b5c624f084","Type":"ContainerStarted","Data":"707aaad46246aebf33837e4b3abb662c1da5afaf27122cfe4534cc788139cc7d"} Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.798574 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kk56x" event={"ID":"3ee97d71-6a45-4d65-9242-fe34a6d15f52","Type":"ContainerStarted","Data":"83cebf25ab16a1aa2458a9dfbe045994a1b9dc99576f14ae0daa2b78033e9683"} Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.840720 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.841053 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fc4138a1-47dc-4954-9af5-fdd8f53588e5-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"fc4138a1-47dc-4954-9af5-fdd8f53588e5\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.841110 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fc4138a1-47dc-4954-9af5-fdd8f53588e5-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"fc4138a1-47dc-4954-9af5-fdd8f53588e5\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 21 17:58:55 crc kubenswrapper[4792]: E0121 17:58:55.841793 4792 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:56.341778613 +0000 UTC m=+170.323741799 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.842498 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fc4138a1-47dc-4954-9af5-fdd8f53588e5-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"fc4138a1-47dc-4954-9af5-fdd8f53588e5\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.909327 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fc4138a1-47dc-4954-9af5-fdd8f53588e5-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"fc4138a1-47dc-4954-9af5-fdd8f53588e5\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 21 17:58:55 crc kubenswrapper[4792]: I0121 17:58:55.963926 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:55 crc kubenswrapper[4792]: E0121 17:58:55.964334 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:56.46432022 +0000 UTC m=+170.446283406 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.065095 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:56 crc kubenswrapper[4792]: E0121 17:58:56.065231 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:56.565208879 +0000 UTC m=+170.547172065 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.065767 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:56 crc kubenswrapper[4792]: E0121 17:58:56.066085 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:56.566077301 +0000 UTC m=+170.548040487 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.079398 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.169719 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:56 crc kubenswrapper[4792]: E0121 17:58:56.170171 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:56.670135619 +0000 UTC m=+170.652098805 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.170334 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:56 crc kubenswrapper[4792]: E0121 17:58:56.170720 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:56.670707783 +0000 UTC m=+170.652670959 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.171126 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-d4jqh"] Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.271070 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:56 crc kubenswrapper[4792]: E0121 17:58:56.271746 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:56.771731346 +0000 UTC m=+170.753694532 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.357181 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2rld8"] Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.372707 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:56 crc kubenswrapper[4792]: E0121 17:58:56.373051 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:56.873038095 +0000 UTC m=+170.855001291 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:56 crc kubenswrapper[4792]: W0121 17:58:56.380996 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod93c3d3cc_1042_4c4d_bc8c_80360b6ca310.slice/crio-e5e38f4e6779ed8fc453c5df95e39e3197c8180118b8c60e6a20d879051d2a51 WatchSource:0}: Error finding container e5e38f4e6779ed8fc453c5df95e39e3197c8180118b8c60e6a20d879051d2a51: Status 404 returned error can't find the container with id e5e38f4e6779ed8fc453c5df95e39e3197c8180118b8c60e6a20d879051d2a51 Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.473603 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:56 crc kubenswrapper[4792]: E0121 17:58:56.474106 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:58:56.974086998 +0000 UTC m=+170.956050184 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.515731 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.535040 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 21 17:58:56 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld Jan 21 17:58:56 crc kubenswrapper[4792]: [+]process-running ok Jan 21 17:58:56 crc kubenswrapper[4792]: healthz check failed Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.535090 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.570096 4792 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.582140 4792 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-21T17:58:56.570153518Z","Handler":null,"Name":""} Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.583104 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:56 crc kubenswrapper[4792]: E0121 17:58:56.583550 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:58:57.083533211 +0000 UTC m=+171.065496407 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xscbt" (UID: "15a12100-5704-4b1b-b9db-4961709b2587") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.647088 4792 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.647138 4792 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.685308 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.695821 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.786759 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.815621 4792 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
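[annotation] The records above resolve the failure loop that dominates this window: every MountDevice and TearDown attempt on pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 fails with "driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers" until 17:58:56.647, when the plugin watcher picks up /var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock and csi_plugin.go validates and registers the driver; the very next unmount and mount attempts succeed. The sketch below is a minimal model of that gate, not kubelet source; the type and field names are hypothetical. It shows why the same lookup that failed before registration succeeds immediately afterwards.

// Minimal model (hypothetical names, not kubelet source) of the CSI driver
// registry gate visible in this log: operations fail while the driver name
// is absent from the registry and succeed once the plugin watcher has
// registered it.
package main

import (
	"fmt"
	"sync"
)

// driverRegistry stands in for kubelet's in-memory table of registered CSI
// drivers (populated via the RegisterPlugin path seen in csi_plugin.go).
type driverRegistry struct {
	mu      sync.RWMutex
	drivers map[string]string // driver name -> endpoint socket path
}

// client mimics newCsiDriverClient: it refuses to build a client for a
// driver that has not registered yet, producing the error seen above.
func (r *driverRegistry) client(name string) (string, error) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	ep, ok := r.drivers[name]
	if !ok {
		return "", fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
	}
	return ep, nil
}

// register mimics the plugin-watcher path that runs at 17:58:56.647.
func (r *driverRegistry) register(name, endpoint string) {
	r.mu.Lock()
	defer r.mu.Unlock()
	r.drivers[name] = endpoint
}

func main() {
	reg := &driverRegistry{drivers: map[string]string{}}

	// Before registration: fails fast, exactly like the retry loop above.
	if _, err := reg.client("kubevirt.io.hostpath-provisioner"); err != nil {
		fmt.Println("mount attempt:", err)
	}

	// After the watcher registers the socket, the same lookup succeeds.
	reg.register("kubevirt.io.hostpath-provisioner",
		"/var/lib/kubelet/plugins/csi-hostpath/csi.sock")
	if ep, err := reg.client("kubevirt.io.hostpath-provisioner"); err == nil {
		fmt.Println("mount attempt: using endpoint", ep)
	}
}

Note also the record immediately above: because this hostpath driver does not advertise the STAGE_UNSTAGE_VOLUME capability, the attacher skips the NodeStageVolume step entirely, which is why MountDevice is reported as succeeding right after registration with no staging work.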
Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.815655 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.822880 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-drcjx" event={"ID":"cc09a117-7791-479f-8477-46cd94afcc09","Type":"ContainerStarted","Data":"614c69936623a2134225c9bf6d9ad211ff1ac4a07ef6b62a3bd439f913dc0599"}
Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.824960 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" event={"ID":"aa38141d-14db-436f-b9f7-9270a301ef1e","Type":"ContainerStarted","Data":"13c9158f4622fac9d716df003c404623e531df3259ad76f29024ec4cceec331e"}
Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.832692 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kk56x" event={"ID":"3ee97d71-6a45-4d65-9242-fe34a6d15f52","Type":"ContainerStarted","Data":"9807b73506a649c95ec88f5ac3552aa38a94217c527dc0b87efbf9b295417747"}
Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.843052 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"011ef36b-6682-4478-aed2-78475fbf0728","Type":"ContainerStarted","Data":"e6417fd77e794f871b5cc203be9a6bab11e9bf3460eea655d6ebd6653c50053c"}
Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.874640 4792 generic.go:334] "Generic (PLEG): container finished" podID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" containerID="056c40bb94649ec92bda3b3854a66c3f5412361dad988399fac837071ad8262f" exitCode=0
Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.874712 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4kppq" event={"ID":"bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd","Type":"ContainerDied","Data":"056c40bb94649ec92bda3b3854a66c3f5412361dad988399fac837071ad8262f"}
Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.876861 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"fc4138a1-47dc-4954-9af5-fdd8f53588e5","Type":"ContainerStarted","Data":"a5e69742ff0153b6619f6155c4bb858ee5485ae6342a7b63a42c7881c585033f"}
Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.880475 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" event={"ID":"97b1a1f0-3533-44d9-8c10-9feb31d988ea","Type":"ContainerStarted","Data":"c09ff7227bc6b84b8170ee21c96acf06a2c24af3c17343461d630f846e047f6e"}
Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.882840 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d4jqh" event={"ID":"106746c3-4570-4081-90ec-a6f1afd6dade","Type":"ContainerStarted","Data":"912e239f3e17301de26252cf0de8f6b5607e8eebcd9fb4345c33fb32e725ca34"}
Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.884084 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c575g" event={"ID":"1018828e-7a21-4a36-83a9-c87d6aaa38c3","Type":"ContainerStarted","Data":"eb6962ae70b83122c0f2994039eb80c7baffbf9f30d2ff441e100c7b1c2e947e"}
Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.884948 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2rld8" event={"ID":"93c3d3cc-1042-4c4d-bc8c-80360b6ca310","Type":"ContainerStarted","Data":"e5e38f4e6779ed8fc453c5df95e39e3197c8180118b8c60e6a20d879051d2a51"}
Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.886381 4792 generic.go:334] "Generic (PLEG): container finished" podID="c181f61f-a9d3-4d0e-84d2-aa2bd560017b" containerID="7f565a65bf855b0ebfd5423f864d2872a7d30231d2c8736d4c1bfc616cb3d67b" exitCode=0
Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.886417 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-rljlr" event={"ID":"c181f61f-a9d3-4d0e-84d2-aa2bd560017b","Type":"ContainerDied","Data":"7f565a65bf855b0ebfd5423f864d2872a7d30231d2c8736d4c1bfc616cb3d67b"}
Jan 21 17:58:56 crc kubenswrapper[4792]: I0121 17:58:56.983079 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xscbt\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:57 crc kubenswrapper[4792]: I0121 17:58:57.265242 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:57 crc kubenswrapper[4792]: E0121 17:58:57.358118 4792 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc09a117_7791_479f_8477_46cd94afcc09.slice/crio-614c69936623a2134225c9bf6d9ad211ff1ac4a07ef6b62a3bd439f913dc0599.scope\": RecentStats: unable to find data in memory cache]"
Jan 21 17:58:57 crc kubenswrapper[4792]: I0121 17:58:57.546567 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:58:57 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld
Jan 21 17:58:57 crc kubenswrapper[4792]: [+]process-running ok
Jan 21 17:58:57 crc kubenswrapper[4792]: healthz check failed
Jan 21 17:58:57 crc kubenswrapper[4792]: I0121 17:58:57.546983 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:58:57 crc kubenswrapper[4792]: I0121 17:58:57.799777 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xscbt"]
Jan 21 17:58:57 crc kubenswrapper[4792]: W0121 17:58:57.802598 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod15a12100_5704_4b1b_b9db_4961709b2587.slice/crio-7f14eaf3284f8cec53124445f5d2ffd3833e608b9f8600c2b6ead09e9d5a5e10 WatchSource:0}: Error finding container 7f14eaf3284f8cec53124445f5d2ffd3833e608b9f8600c2b6ead09e9d5a5e10: Status 404 returned error can't find the container with id 7f14eaf3284f8cec53124445f5d2ffd3833e608b9f8600c2b6ead09e9d5a5e10
Jan 21 17:58:57 crc kubenswrapper[4792]: I0121 17:58:57.940983 4792 generic.go:334] "Generic (PLEG): container finished" podID="106746c3-4570-4081-90ec-a6f1afd6dade" containerID="23d89e97943eb3d1d4c420f74abcfc9e5cd0fe912dcc5343dd416b054d8c074d" exitCode=0
Jan 21 17:58:57 crc kubenswrapper[4792]: I0121 17:58:57.941107 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d4jqh" event={"ID":"106746c3-4570-4081-90ec-a6f1afd6dade","Type":"ContainerDied","Data":"23d89e97943eb3d1d4c420f74abcfc9e5cd0fe912dcc5343dd416b054d8c074d"}
Jan 21 17:58:57 crc kubenswrapper[4792]: I0121 17:58:57.943119 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 21 17:58:57 crc kubenswrapper[4792]: I0121 17:58:57.943708 4792 generic.go:334] "Generic (PLEG): container finished" podID="e6f739f0-719a-4454-bbaf-b4b5c624f084" containerID="1208399a0f6cf74b9caa828c3ccaf1356ea27b63e5f7a724b983e1b9be4d1e12" exitCode=0
Jan 21 17:58:57 crc kubenswrapper[4792]: I0121 17:58:57.943760 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l85mb" event={"ID":"e6f739f0-719a-4454-bbaf-b4b5c624f084","Type":"ContainerDied","Data":"1208399a0f6cf74b9caa828c3ccaf1356ea27b63e5f7a724b983e1b9be4d1e12"}
Jan 21 17:58:57 crc kubenswrapper[4792]: I0121 17:58:57.950583 4792 generic.go:334] "Generic (PLEG): container finished" podID="cc09a117-7791-479f-8477-46cd94afcc09" containerID="614c69936623a2134225c9bf6d9ad211ff1ac4a07ef6b62a3bd439f913dc0599" exitCode=0
Jan 21 17:58:57 crc kubenswrapper[4792]: I0121 17:58:57.950646 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-drcjx" event={"ID":"cc09a117-7791-479f-8477-46cd94afcc09","Type":"ContainerDied","Data":"614c69936623a2134225c9bf6d9ad211ff1ac4a07ef6b62a3bd439f913dc0599"}
Jan 21 17:58:57 crc kubenswrapper[4792]: I0121 17:58:57.971639 4792 generic.go:334] "Generic (PLEG): container finished" podID="c18e5f7c-5ebf-4166-82ab-c29a81232623" containerID="93568df998a00dd4632eedcf41c80ce0c901479ee389023b0fd4338d31dc477d" exitCode=0
Jan 21 17:58:57 crc kubenswrapper[4792]: I0121 17:58:57.971749 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5bd5w" event={"ID":"c18e5f7c-5ebf-4166-82ab-c29a81232623","Type":"ContainerDied","Data":"93568df998a00dd4632eedcf41c80ce0c901479ee389023b0fd4338d31dc477d"}
Jan 21 17:58:57 crc kubenswrapper[4792]: I0121 17:58:57.991832 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" event={"ID":"15a12100-5704-4b1b-b9db-4961709b2587","Type":"ContainerStarted","Data":"7f14eaf3284f8cec53124445f5d2ffd3833e608b9f8600c2b6ead09e9d5a5e10"}
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.007426 4792 generic.go:334] "Generic (PLEG): container finished" podID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" containerID="9807b73506a649c95ec88f5ac3552aa38a94217c527dc0b87efbf9b295417747" exitCode=0
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.007617 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kk56x" event={"ID":"3ee97d71-6a45-4d65-9242-fe34a6d15f52","Type":"ContainerDied","Data":"9807b73506a649c95ec88f5ac3552aa38a94217c527dc0b87efbf9b295417747"}
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.020800 4792 generic.go:334] "Generic (PLEG): container finished" podID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" containerID="eb6962ae70b83122c0f2994039eb80c7baffbf9f30d2ff441e100c7b1c2e947e" exitCode=0
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.021989 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c575g" event={"ID":"1018828e-7a21-4a36-83a9-c87d6aaa38c3","Type":"ContainerDied","Data":"eb6962ae70b83122c0f2994039eb80c7baffbf9f30d2ff441e100c7b1c2e947e"}
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.125746 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body=
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.126079 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused"
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.127018 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-ddsqn" podStartSLOduration=151.126999798 podStartE2EDuration="2m31.126999798s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:58.124811323 +0000 UTC m=+172.106774509" watchObservedRunningTime="2026-01-21 17:58:58.126999798 +0000 UTC m=+172.108962984"
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.128130 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body=
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.128179 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused"
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.265290 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=6.265256837 podStartE2EDuration="6.265256837s" podCreationTimestamp="2026-01-21 17:58:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:58.264414185 +0000 UTC m=+172.246377391" watchObservedRunningTime="2026-01-21 17:58:58.265256837 +0000 UTC m=+172.247220013"
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.291506 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes"
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.292336 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-kzpjg"
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.292425 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-kzpjg"
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.475028 4792 patch_prober.go:28] interesting pod/console-f9d7485db-65vgm container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.32:8443/health\": dial tcp 10.217.0.32:8443: connect: connection refused" start-of-body=
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.475091 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-65vgm" podUID="1166298c-e29b-4c0a-b153-d92acf85b0d2" containerName="console" probeResult="failure" output="Get \"https://10.217.0.32:8443/health\": dial tcp 10.217.0.32:8443: connect: connection refused"
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.530547 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:58:58 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld
Jan 21 17:58:58 crc kubenswrapper[4792]: [+]process-running ok
Jan 21 17:58:58 crc kubenswrapper[4792]: healthz check failed
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.530609 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.588018 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-rljlr"
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.621717 4792 patch_prober.go:28] interesting pod/apiserver-76f77b778f-q7mdj container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Jan 21 17:58:58 crc kubenswrapper[4792]: [+]log ok
Jan 21 17:58:58 crc kubenswrapper[4792]: [+]etcd ok
Jan 21 17:58:58 crc kubenswrapper[4792]: [+]poststarthook/start-apiserver-admission-initializer ok
Jan 21 17:58:58 crc kubenswrapper[4792]: [+]poststarthook/generic-apiserver-start-informers ok
Jan 21 17:58:58 crc kubenswrapper[4792]: [-]poststarthook/max-in-flight-filter failed: reason withheld
Jan 21 17:58:58 crc kubenswrapper[4792]: [-]poststarthook/storage-object-count-tracker-hook failed: reason withheld
Jan 21 17:58:58 crc kubenswrapper[4792]: [-]poststarthook/image.openshift.io-apiserver-caches failed: reason withheld
Jan 21 17:58:58 crc kubenswrapper[4792]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld
Jan 21 17:58:58 crc kubenswrapper[4792]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld
Jan 21 17:58:58 crc kubenswrapper[4792]: [+]poststarthook/project.openshift.io-projectcache ok
Jan 21 17:58:58 crc kubenswrapper[4792]: [-]poststarthook/project.openshift.io-projectauthorizationcache failed: reason withheld
Jan 21 17:58:58 crc kubenswrapper[4792]: [-]poststarthook/openshift.io-startinformers failed: reason withheld
Jan 21 17:58:58 crc kubenswrapper[4792]: [+]poststarthook/openshift.io-restmapperupdater ok
Jan 21 17:58:58 crc kubenswrapper[4792]: [-]poststarthook/quota.openshift.io-clusterquotamapping failed: reason withheld
Jan 21 17:58:58 crc kubenswrapper[4792]: livez check failed
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.621780 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-q7mdj" podUID="e668a4f5-eb70-407c-91de-67acaaa1e03a" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.779201 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c181f61f-a9d3-4d0e-84d2-aa2bd560017b-config-volume\") pod \"c181f61f-a9d3-4d0e-84d2-aa2bd560017b\" (UID: \"c181f61f-a9d3-4d0e-84d2-aa2bd560017b\") "
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.779452 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c181f61f-a9d3-4d0e-84d2-aa2bd560017b-secret-volume\") pod \"c181f61f-a9d3-4d0e-84d2-aa2bd560017b\" (UID: \"c181f61f-a9d3-4d0e-84d2-aa2bd560017b\") "
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.779770 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-frfhs\" (UniqueName: \"kubernetes.io/projected/c181f61f-a9d3-4d0e-84d2-aa2bd560017b-kube-api-access-frfhs\") pod \"c181f61f-a9d3-4d0e-84d2-aa2bd560017b\" (UID: \"c181f61f-a9d3-4d0e-84d2-aa2bd560017b\") "
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.781819 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c181f61f-a9d3-4d0e-84d2-aa2bd560017b-config-volume" (OuterVolumeSpecName: "config-volume") pod "c181f61f-a9d3-4d0e-84d2-aa2bd560017b" (UID: "c181f61f-a9d3-4d0e-84d2-aa2bd560017b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.788313 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c181f61f-a9d3-4d0e-84d2-aa2bd560017b-kube-api-access-frfhs" (OuterVolumeSpecName: "kube-api-access-frfhs") pod "c181f61f-a9d3-4d0e-84d2-aa2bd560017b" (UID: "c181f61f-a9d3-4d0e-84d2-aa2bd560017b"). InnerVolumeSpecName "kube-api-access-frfhs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.796738 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c181f61f-a9d3-4d0e-84d2-aa2bd560017b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c181f61f-a9d3-4d0e-84d2-aa2bd560017b" (UID: "c181f61f-a9d3-4d0e-84d2-aa2bd560017b"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.804457 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq"
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.843126 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b"
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.851063 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b"
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.886559 4792 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c181f61f-a9d3-4d0e-84d2-aa2bd560017b-config-volume\") on node \"crc\" DevicePath \"\""
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.886593 4792 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c181f61f-a9d3-4d0e-84d2-aa2bd560017b-secret-volume\") on node \"crc\" DevicePath \"\""
Jan 21 17:58:58 crc kubenswrapper[4792]: I0121 17:58:58.886604 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-frfhs\" (UniqueName: \"kubernetes.io/projected/c181f61f-a9d3-4d0e-84d2-aa2bd560017b-kube-api-access-frfhs\") on node \"crc\" DevicePath \"\""
Jan 21 17:58:59 crc kubenswrapper[4792]: I0121 17:58:59.052632 4792 generic.go:334] "Generic (PLEG): container finished" podID="fc4138a1-47dc-4954-9af5-fdd8f53588e5" containerID="cf2d6635e2e6dc9daf5082e87d81791ec32ee427259de2a2951c8fddac9fa1d1" exitCode=0
Jan 21 17:58:59 crc kubenswrapper[4792]: I0121 17:58:59.052709 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"fc4138a1-47dc-4954-9af5-fdd8f53588e5","Type":"ContainerDied","Data":"cf2d6635e2e6dc9daf5082e87d81791ec32ee427259de2a2951c8fddac9fa1d1"}
Jan 21 17:58:59 crc kubenswrapper[4792]: I0121 17:58:59.054840 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" event={"ID":"15a12100-5704-4b1b-b9db-4961709b2587","Type":"ContainerStarted","Data":"05e800bc652b8534f791ae0e90dd5dcd84cf236d54bf0ddca74f277b45b9bb32"}
Jan 21 17:58:59 crc kubenswrapper[4792]: I0121 17:58:59.055556 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:58:59 crc kubenswrapper[4792]: I0121 17:58:59.060213 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" event={"ID":"aa38141d-14db-436f-b9f7-9270a301ef1e","Type":"ContainerStarted","Data":"615349f710c4ebd3e5d327ce0312f6e7de7bf3dac8d4bed6b158d1d95f9b18d3"}
Jan 21 17:58:59 crc kubenswrapper[4792]: I0121 17:58:59.066572 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-ddsqn" event={"ID":"97b1a1f0-3533-44d9-8c10-9feb31d988ea","Type":"ContainerStarted","Data":"76ba060c009380375dc1297115d981e592fbd9c867640c207fef04cdbac7d60c"}
Jan 21 17:58:59 crc kubenswrapper[4792]: I0121 17:58:59.068830 4792 generic.go:334] "Generic (PLEG): container finished" podID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" containerID="fe2abba28c527719cfc0a40e70c888b18c80705ba94d43f76d263c2b974a234a" exitCode=0
Jan 21 17:58:59 crc kubenswrapper[4792]: I0121 17:58:59.068890 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2rld8" event={"ID":"93c3d3cc-1042-4c4d-bc8c-80360b6ca310","Type":"ContainerDied","Data":"fe2abba28c527719cfc0a40e70c888b18c80705ba94d43f76d263c2b974a234a"}
Jan 21 17:58:59 crc kubenswrapper[4792]: I0121 17:58:59.074781 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-rljlr" event={"ID":"c181f61f-a9d3-4d0e-84d2-aa2bd560017b","Type":"ContainerDied","Data":"31fe822ab3e4c48a08ebfb7c83d572d1c3d486f416bbfcbb368ef9acbc2d53e0"}
Jan 21 17:58:59 crc kubenswrapper[4792]: I0121 17:58:59.074817 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="31fe822ab3e4c48a08ebfb7c83d572d1c3d486f416bbfcbb368ef9acbc2d53e0"
Jan 21 17:58:59 crc kubenswrapper[4792]: I0121 17:58:59.074889 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-rljlr"
Jan 21 17:58:59 crc kubenswrapper[4792]: I0121 17:58:59.077862 4792 generic.go:334] "Generic (PLEG): container finished" podID="011ef36b-6682-4478-aed2-78475fbf0728" containerID="e6417fd77e794f871b5cc203be9a6bab11e9bf3460eea655d6ebd6653c50053c" exitCode=0
Jan 21 17:58:59 crc kubenswrapper[4792]: I0121 17:58:59.078366 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"011ef36b-6682-4478-aed2-78475fbf0728","Type":"ContainerDied","Data":"e6417fd77e794f871b5cc203be9a6bab11e9bf3460eea655d6ebd6653c50053c"}
Jan 21 17:58:59 crc kubenswrapper[4792]: I0121 17:58:59.104347 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" podStartSLOduration=152.104323484 podStartE2EDuration="2m32.104323484s" podCreationTimestamp="2026-01-21 17:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:59.10055369 +0000 UTC m=+173.082516876" watchObservedRunningTime="2026-01-21 17:58:59.104323484 +0000 UTC m=+173.086286670"
Jan 21 17:58:59 crc kubenswrapper[4792]: I0121 17:58:59.157969 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-ts9kq" podStartSLOduration=24.157944588 podStartE2EDuration="24.157944588s" podCreationTimestamp="2026-01-21 17:58:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:58:59.142289508 +0000 UTC m=+173.124252694" watchObservedRunningTime="2026-01-21 17:58:59.157944588 +0000 UTC m=+173.139907774"
Jan 21 17:58:59 crc kubenswrapper[4792]: I0121 17:58:59.528073 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:58:59 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld
Jan 21 17:58:59 crc kubenswrapper[4792]: [+]process-running ok
Jan 21 17:58:59 crc kubenswrapper[4792]: healthz check failed
Jan 21 17:58:59 crc kubenswrapper[4792]: I0121 17:58:59.528144 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:58:59 crc kubenswrapper[4792]: I0121 17:58:59.577919 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-6nxvx"
Jan 21 17:59:00 crc kubenswrapper[4792]: I0121 17:59:00.536150 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:59:00 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld
Jan 21 17:59:00 crc kubenswrapper[4792]: [+]process-running ok
Jan 21 17:59:00 crc kubenswrapper[4792]: healthz check failed
Jan 21 17:59:00 crc kubenswrapper[4792]: I0121 17:59:00.536527 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:59:00 crc kubenswrapper[4792]: I0121 17:59:00.709536 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 21 17:59:00 crc kubenswrapper[4792]: I0121 17:59:00.740275 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fc4138a1-47dc-4954-9af5-fdd8f53588e5-kube-api-access\") pod \"fc4138a1-47dc-4954-9af5-fdd8f53588e5\" (UID: \"fc4138a1-47dc-4954-9af5-fdd8f53588e5\") "
Jan 21 17:59:00 crc kubenswrapper[4792]: I0121 17:59:00.740383 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fc4138a1-47dc-4954-9af5-fdd8f53588e5-kubelet-dir\") pod \"fc4138a1-47dc-4954-9af5-fdd8f53588e5\" (UID: \"fc4138a1-47dc-4954-9af5-fdd8f53588e5\") "
Jan 21 17:59:00 crc kubenswrapper[4792]: I0121 17:59:00.740585 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fc4138a1-47dc-4954-9af5-fdd8f53588e5-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "fc4138a1-47dc-4954-9af5-fdd8f53588e5" (UID: "fc4138a1-47dc-4954-9af5-fdd8f53588e5"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 21 17:59:00 crc kubenswrapper[4792]: I0121 17:59:00.747203 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc4138a1-47dc-4954-9af5-fdd8f53588e5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "fc4138a1-47dc-4954-9af5-fdd8f53588e5" (UID: "fc4138a1-47dc-4954-9af5-fdd8f53588e5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 17:59:00 crc kubenswrapper[4792]: I0121 17:59:00.769647 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 21 17:59:00 crc kubenswrapper[4792]: I0121 17:59:00.841248 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/011ef36b-6682-4478-aed2-78475fbf0728-kubelet-dir\") pod \"011ef36b-6682-4478-aed2-78475fbf0728\" (UID: \"011ef36b-6682-4478-aed2-78475fbf0728\") "
Jan 21 17:59:00 crc kubenswrapper[4792]: I0121 17:59:00.841351 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/011ef36b-6682-4478-aed2-78475fbf0728-kube-api-access\") pod \"011ef36b-6682-4478-aed2-78475fbf0728\" (UID: \"011ef36b-6682-4478-aed2-78475fbf0728\") "
Jan 21 17:59:00 crc kubenswrapper[4792]: I0121 17:59:00.841569 4792 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fc4138a1-47dc-4954-9af5-fdd8f53588e5-kubelet-dir\") on node \"crc\" DevicePath \"\""
Jan 21 17:59:00 crc kubenswrapper[4792]: I0121 17:59:00.841587 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fc4138a1-47dc-4954-9af5-fdd8f53588e5-kube-api-access\") on node \"crc\" DevicePath \"\""
Jan 21 17:59:00 crc kubenswrapper[4792]: I0121 17:59:00.842383 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/011ef36b-6682-4478-aed2-78475fbf0728-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "011ef36b-6682-4478-aed2-78475fbf0728" (UID: "011ef36b-6682-4478-aed2-78475fbf0728"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 21 17:59:00 crc kubenswrapper[4792]: I0121 17:59:00.847375 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/011ef36b-6682-4478-aed2-78475fbf0728-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "011ef36b-6682-4478-aed2-78475fbf0728" (UID: "011ef36b-6682-4478-aed2-78475fbf0728"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 17:59:00 crc kubenswrapper[4792]: I0121 17:59:00.942588 4792 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/011ef36b-6682-4478-aed2-78475fbf0728-kubelet-dir\") on node \"crc\" DevicePath \"\""
Jan 21 17:59:00 crc kubenswrapper[4792]: I0121 17:59:00.942629 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/011ef36b-6682-4478-aed2-78475fbf0728-kube-api-access\") on node \"crc\" DevicePath \"\""
Jan 21 17:59:01 crc kubenswrapper[4792]: I0121 17:59:01.138383 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"011ef36b-6682-4478-aed2-78475fbf0728","Type":"ContainerDied","Data":"c5bb2dc55154cb6f7eba89606f56437bb7b9116b371431e3a9259f06d654d0bb"}
Jan 21 17:59:01 crc kubenswrapper[4792]: I0121 17:59:01.139077 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5bb2dc55154cb6f7eba89606f56437bb7b9116b371431e3a9259f06d654d0bb"
Jan 21 17:59:01 crc kubenswrapper[4792]: I0121 17:59:01.138433 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 21 17:59:01 crc kubenswrapper[4792]: I0121 17:59:01.141159 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 21 17:59:01 crc kubenswrapper[4792]: I0121 17:59:01.146214 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"fc4138a1-47dc-4954-9af5-fdd8f53588e5","Type":"ContainerDied","Data":"a5e69742ff0153b6619f6155c4bb858ee5485ae6342a7b63a42c7881c585033f"}
Jan 21 17:59:01 crc kubenswrapper[4792]: I0121 17:59:01.146281 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a5e69742ff0153b6619f6155c4bb858ee5485ae6342a7b63a42c7881c585033f"
Jan 21 17:59:01 crc kubenswrapper[4792]: I0121 17:59:01.529048 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:59:01 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld
Jan 21 17:59:01 crc kubenswrapper[4792]: [+]process-running ok
Jan 21 17:59:01 crc kubenswrapper[4792]: healthz check failed
Jan 21 17:59:01 crc kubenswrapper[4792]: I0121 17:59:01.529140 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:59:02 crc kubenswrapper[4792]: I0121 17:59:02.527212 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:59:02 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld
Jan 21 17:59:02 crc kubenswrapper[4792]: [+]process-running ok
Jan 21 17:59:02 crc kubenswrapper[4792]: healthz check failed
Jan 21 17:59:02 crc kubenswrapper[4792]: I0121 17:59:02.527301 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:59:03 crc kubenswrapper[4792]: I0121 17:59:03.243342 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-q7mdj"
Jan 21 17:59:03 crc kubenswrapper[4792]: I0121 17:59:03.248474 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-q7mdj"
Jan 21 17:59:03 crc kubenswrapper[4792]: I0121 17:59:03.528230 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:59:03 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld
Jan 21 17:59:03 crc kubenswrapper[4792]: [+]process-running ok
Jan 21 17:59:03 crc kubenswrapper[4792]: healthz check failed
Jan 21 17:59:03 crc kubenswrapper[4792]: I0121 17:59:03.528668 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:59:04 crc kubenswrapper[4792]: I0121 17:59:04.549883 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:59:04 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld
Jan 21 17:59:04 crc kubenswrapper[4792]: [+]process-running ok
Jan 21 17:59:04 crc kubenswrapper[4792]: healthz check failed
Jan 21 17:59:04 crc kubenswrapper[4792]: I0121 17:59:04.550062 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:59:05 crc kubenswrapper[4792]: I0121 17:59:05.529160 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:59:05 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld
Jan 21 17:59:05 crc kubenswrapper[4792]: [+]process-running ok
Jan 21 17:59:05 crc kubenswrapper[4792]: healthz check failed
Jan 21 17:59:05 crc kubenswrapper[4792]: I0121 17:59:05.529272 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:59:06 crc kubenswrapper[4792]: I0121 17:59:06.578317 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:59:06 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld
Jan 21 17:59:06 crc kubenswrapper[4792]: [+]process-running ok
Jan 21 17:59:06 crc kubenswrapper[4792]: healthz check failed
Jan 21 17:59:06 crc kubenswrapper[4792]: I0121 17:59:06.578747 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:59:07 crc kubenswrapper[4792]: I0121 17:59:07.547042 4792 patch_prober.go:28] interesting pod/router-default-5444994796-p5dgb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:59:07 crc kubenswrapper[4792]: [+]has-synced ok
Jan 21 17:59:07 crc kubenswrapper[4792]: [+]process-running ok
Jan 21 17:59:07 crc kubenswrapper[4792]: healthz check failed
Jan 21 17:59:07 crc kubenswrapper[4792]: I0121 17:59:07.547591 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5dgb" podUID="6c6b48a4-1067-4a71-b66a-24a9677e6c37" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:59:08 crc kubenswrapper[4792]: I0121 17:59:08.125141 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body=
Jan 21 17:59:08 crc kubenswrapper[4792]: I0121 17:59:08.125307 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body=
Jan 21 17:59:08 crc kubenswrapper[4792]: I0121 17:59:08.125310 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused"
Jan 21 17:59:08 crc kubenswrapper[4792]: I0121 17:59:08.125409 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused"
Jan 21 17:59:08 crc kubenswrapper[4792]: I0121 17:59:08.476439 4792 patch_prober.go:28] interesting pod/console-f9d7485db-65vgm container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.32:8443/health\": dial tcp 10.217.0.32:8443: connect: connection refused" start-of-body=
Jan 21 17:59:08 crc kubenswrapper[4792]: I0121 17:59:08.476508 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-65vgm" podUID="1166298c-e29b-4c0a-b153-d92acf85b0d2" containerName="console" probeResult="failure" output="Get \"https://10.217.0.32:8443/health\": dial tcp 10.217.0.32:8443: connect: connection refused"
Jan 21 17:59:08 crc kubenswrapper[4792]: I0121 17:59:08.530303 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-p5dgb"
Jan 21 17:59:08 crc kubenswrapper[4792]: I0121 17:59:08.534073 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-p5dgb"
Jan 21 17:59:13 crc kubenswrapper[4792]: I0121 17:59:13.864130 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:59:17 crc kubenswrapper[4792]: I0121 17:59:17.274650 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-xscbt"
Jan 21 17:59:18 crc kubenswrapper[4792]: I0121 17:59:18.125691 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body=
Jan 21 17:59:18 crc kubenswrapper[4792]: I0121 17:59:18.125788 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused"
Jan 21 17:59:18 crc kubenswrapper[4792]: I0121 17:59:18.126193 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body=
Jan 21 17:59:18 crc kubenswrapper[4792]: I0121 17:59:18.126234 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused"
Jan 21 17:59:18 crc kubenswrapper[4792]: I0121 17:59:18.126274 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-xpn4x"
Jan 21 17:59:18 crc kubenswrapper[4792]: I0121 17:59:18.126973 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"ad97330e98c6fbf4db1ecb521975a1e3fb03b9a40ee35bb1dc3bcb7c5d99835a"} pod="openshift-console/downloads-7954f5f757-xpn4x" containerMessage="Container download-server failed liveness probe, will be restarted"
Jan 21 17:59:18 crc kubenswrapper[4792]: I0121 17:59:18.127067 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" containerID="cri-o://ad97330e98c6fbf4db1ecb521975a1e3fb03b9a40ee35bb1dc3bcb7c5d99835a" gracePeriod=2
Jan 21 17:59:18 crc kubenswrapper[4792]: I0121 17:59:18.127838 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body=
Jan 21 17:59:18 crc kubenswrapper[4792]: I0121 17:59:18.127931 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused"
Jan 21 17:59:18 crc kubenswrapper[4792]: I0121 17:59:18.496384 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-65vgm"
Jan 21 17:59:18 crc kubenswrapper[4792]: I0121 17:59:18.503465 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-65vgm"
Jan 21 17:59:18 crc kubenswrapper[4792]: I0121 17:59:18.841781 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ft2zn"
Jan 21 17:59:21 crc kubenswrapper[4792]: I0121 17:59:21.710442 4792 generic.go:334] "Generic (PLEG): container finished" podID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerID="ad97330e98c6fbf4db1ecb521975a1e3fb03b9a40ee35bb1dc3bcb7c5d99835a" exitCode=0
Jan 21 17:59:21 crc kubenswrapper[4792]: I0121 17:59:21.710734 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-xpn4x" event={"ID":"1001cdf3-8e91-4e17-a8f7-a92b91daf23e","Type":"ContainerDied","Data":"ad97330e98c6fbf4db1ecb521975a1e3fb03b9a40ee35bb1dc3bcb7c5d99835a"}
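
The downloads-7954f5f757-xpn4x sequence above is the full liveness-failure path: liveness probes fail ten seconds apart (17:58:58, 17:59:08, 17:59:18), the probe result flips to unhealthy on the third failure, kuberuntime_manager records that the container will be restarted, the kill is issued with gracePeriod=2, and the PLEG ContainerDied event lands about three seconds later. A sketch of a liveness probe consistent with that behaviour (period and threshold are inferred from the spacing, the grace period from the kill entry; none are read from the actual manifest):

    package main

    import (
        "fmt"

        corev1 "k8s.io/api/core/v1"
        "k8s.io/apimachinery/pkg/util/intstr"
    )

    func main() {
        grace := int64(2) // matches gracePeriod=2 in the kuberuntime_container entry above
        liveness := corev1.Probe{
            ProbeHandler: corev1.ProbeHandler{
                // The log shows GETs against 10.217.0.19:8080 being refused.
                HTTPGet: &corev1.HTTPGetAction{Path: "/", Port: intstr.FromInt(8080)},
            },
            PeriodSeconds:                 10, // assumed from the ~10s spacing of probe entries
            FailureThreshold:              3,  // assumed default; the third failure triggers the kill
            TerminationGracePeriodSeconds: &grace,
        }
        fmt.Printf("restart after %d failures, %ds grace\n",
            liveness.FailureThreshold, *liveness.TerminationGracePeriodSeconds)
    }
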
Jan 21 17:59:23 crc kubenswrapper[4792]: I0121 17:59:23.570415 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 21 17:59:23 crc kubenswrapper[4792]: I0121 17:59:23.570496 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 21 17:59:28 crc kubenswrapper[4792]: I0121 17:59:28.124957 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body=
Jan 21 17:59:28 crc kubenswrapper[4792]: I0121 17:59:28.125311 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused"
Jan 21 17:59:36 crc kubenswrapper[4792]: I0121 17:59:36.180465 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Jan 21 17:59:36 crc kubenswrapper[4792]: E0121 17:59:36.181313 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c181f61f-a9d3-4d0e-84d2-aa2bd560017b" containerName="collect-profiles"
Jan 21 17:59:36 crc kubenswrapper[4792]: I0121 17:59:36.181329 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c181f61f-a9d3-4d0e-84d2-aa2bd560017b" containerName="collect-profiles"
Jan 21 17:59:36 crc kubenswrapper[4792]: E0121 17:59:36.181345 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc4138a1-47dc-4954-9af5-fdd8f53588e5" containerName="pruner"
Jan 21 17:59:36 crc kubenswrapper[4792]: I0121 17:59:36.181351 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc4138a1-47dc-4954-9af5-fdd8f53588e5" containerName="pruner"
Jan 21 17:59:36 crc kubenswrapper[4792]: E0121 17:59:36.181361 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="011ef36b-6682-4478-aed2-78475fbf0728" containerName="pruner"
Jan 21 17:59:36 crc kubenswrapper[4792]: I0121 17:59:36.181369 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="011ef36b-6682-4478-aed2-78475fbf0728" containerName="pruner"
Jan 21 17:59:36 crc kubenswrapper[4792]: I0121 17:59:36.181468 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="011ef36b-6682-4478-aed2-78475fbf0728" containerName="pruner"
Jan 21 17:59:36 crc kubenswrapper[4792]: I0121 17:59:36.181486 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c181f61f-a9d3-4d0e-84d2-aa2bd560017b" containerName="collect-profiles"
Jan 21 17:59:36 crc kubenswrapper[4792]: I0121 17:59:36.181494 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc4138a1-47dc-4954-9af5-fdd8f53588e5" containerName="pruner"
Jan 21 17:59:36 crc kubenswrapper[4792]: I0121 17:59:36.182091 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Jan 21 17:59:36 crc kubenswrapper[4792]: I0121 17:59:36.185463 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Jan 21 17:59:36 crc kubenswrapper[4792]: I0121 17:59:36.186795 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Jan 21 17:59:36 crc kubenswrapper[4792]: I0121 17:59:36.196557 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Jan 21 17:59:36 crc kubenswrapper[4792]: I0121 17:59:36.287653 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/315a20c5-d12c-498e-8305-5be91162fee4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"315a20c5-d12c-498e-8305-5be91162fee4\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Jan 21 17:59:36 crc kubenswrapper[4792]: I0121 17:59:36.287823 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/315a20c5-d12c-498e-8305-5be91162fee4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"315a20c5-d12c-498e-8305-5be91162fee4\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Jan 21 17:59:36 crc kubenswrapper[4792]: I0121 17:59:36.388707 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/315a20c5-d12c-498e-8305-5be91162fee4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"315a20c5-d12c-498e-8305-5be91162fee4\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Jan 21 17:59:36 crc kubenswrapper[4792]: I0121 17:59:36.388806 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/315a20c5-d12c-498e-8305-5be91162fee4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"315a20c5-d12c-498e-8305-5be91162fee4\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Jan 21 17:59:36 crc kubenswrapper[4792]: I0121 17:59:36.388834 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/315a20c5-d12c-498e-8305-5be91162fee4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"315a20c5-d12c-498e-8305-5be91162fee4\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Jan 21 17:59:36 crc kubenswrapper[4792]: I0121 17:59:36.409180 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/315a20c5-d12c-498e-8305-5be91162fee4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"315a20c5-d12c-498e-8305-5be91162fee4\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Jan 21 17:59:36 crc kubenswrapper[4792]: I0121 17:59:36.497176 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Jan 21 17:59:38 crc kubenswrapper[4792]: I0121 17:59:38.126372 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body=
Jan 21 17:59:38 crc kubenswrapper[4792]: I0121 17:59:38.126781 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused"
Jan 21 17:59:41 crc kubenswrapper[4792]: I0121 17:59:41.177253 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Jan 21 17:59:41 crc kubenswrapper[4792]: I0121 17:59:41.178293 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Jan 21 17:59:41 crc kubenswrapper[4792]: I0121 17:59:41.188733 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Jan 21 17:59:41 crc kubenswrapper[4792]: I0121 17:59:41.265047 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/68bfd337-d05a-40cf-bcbb-6d4f44788aff-kube-api-access\") pod \"installer-9-crc\" (UID: \"68bfd337-d05a-40cf-bcbb-6d4f44788aff\") " pod="openshift-kube-apiserver/installer-9-crc"
Jan 21 17:59:41 crc kubenswrapper[4792]: I0121 17:59:41.265132 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/68bfd337-d05a-40cf-bcbb-6d4f44788aff-kubelet-dir\") pod \"installer-9-crc\" (UID: \"68bfd337-d05a-40cf-bcbb-6d4f44788aff\") " pod="openshift-kube-apiserver/installer-9-crc"
Jan 21 17:59:41 crc kubenswrapper[4792]: I0121 17:59:41.265179 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/68bfd337-d05a-40cf-bcbb-6d4f44788aff-var-lock\") pod \"installer-9-crc\" (UID: \"68bfd337-d05a-40cf-bcbb-6d4f44788aff\") " pod="openshift-kube-apiserver/installer-9-crc"
Jan 21 17:59:41 crc kubenswrapper[4792]: I0121 17:59:41.365908 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/68bfd337-d05a-40cf-bcbb-6d4f44788aff-kubelet-dir\") pod \"installer-9-crc\" (UID: \"68bfd337-d05a-40cf-bcbb-6d4f44788aff\") " pod="openshift-kube-apiserver/installer-9-crc"
Jan 21 17:59:41 crc kubenswrapper[4792]: I0121 17:59:41.366062 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/68bfd337-d05a-40cf-bcbb-6d4f44788aff-kubelet-dir\") pod \"installer-9-crc\" (UID: \"68bfd337-d05a-40cf-bcbb-6d4f44788aff\") " pod="openshift-kube-apiserver/installer-9-crc"
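
The VerifyControllerAttachedVolume -> MountVolume -> SetUp progression above is the volume manager walking each volume declared by installer-9-crc. Reconstructed from those entries alone, the pod's volume list would look roughly like the hypothetical sketch below (volume names and plugin kinds come from the log; the hostPath locations are guesses, since the reconciler does not print them):

    package main

    import (
        "fmt"

        corev1 "k8s.io/api/core/v1"
    )

    func main() {
        volumes := []corev1.Volume{
            {Name: "kube-api-access", VolumeSource: corev1.VolumeSource{
                Projected: &corev1.ProjectedVolumeSource{}, // kubernetes.io/projected in the log
            }},
            {Name: "kubelet-dir", VolumeSource: corev1.VolumeSource{
                HostPath: &corev1.HostPathVolumeSource{Path: "/var/lib/kubelet"}, // assumed path
            }},
            {Name: "var-lock", VolumeSource: corev1.VolumeSource{
                HostPath: &corev1.HostPathVolumeSource{Path: "/var/lock"}, // assumed path
            }},
        }
        for _, v := range volumes {
            fmt.Println(v.Name) // one SetUp entry per volume appears in the log
        }
    }
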
pod="openshift-kube-apiserver/installer-9-crc" Jan 21 17:59:41 crc kubenswrapper[4792]: I0121 17:59:41.366549 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/68bfd337-d05a-40cf-bcbb-6d4f44788aff-var-lock\") pod \"installer-9-crc\" (UID: \"68bfd337-d05a-40cf-bcbb-6d4f44788aff\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 21 17:59:41 crc kubenswrapper[4792]: I0121 17:59:41.366815 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/68bfd337-d05a-40cf-bcbb-6d4f44788aff-kube-api-access\") pod \"installer-9-crc\" (UID: \"68bfd337-d05a-40cf-bcbb-6d4f44788aff\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 21 17:59:41 crc kubenswrapper[4792]: I0121 17:59:41.387765 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/68bfd337-d05a-40cf-bcbb-6d4f44788aff-kube-api-access\") pod \"installer-9-crc\" (UID: \"68bfd337-d05a-40cf-bcbb-6d4f44788aff\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 21 17:59:41 crc kubenswrapper[4792]: I0121 17:59:41.500318 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 21 17:59:48 crc kubenswrapper[4792]: I0121 17:59:48.124930 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Jan 21 17:59:48 crc kubenswrapper[4792]: I0121 17:59:48.125568 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" Jan 21 17:59:53 crc kubenswrapper[4792]: I0121 17:59:53.570742 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 17:59:53 crc kubenswrapper[4792]: I0121 17:59:53.572053 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 17:59:53 crc kubenswrapper[4792]: I0121 17:59:53.572173 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" Jan 21 17:59:53 crc kubenswrapper[4792]: I0121 17:59:53.573087 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba"} pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 17:59:53 crc kubenswrapper[4792]: I0121 17:59:53.573154 4792 kuberuntime_container.go:808] "Killing container with a 
grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" containerID="cri-o://f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba" gracePeriod=600 Jan 21 17:59:58 crc kubenswrapper[4792]: I0121 17:59:58.125663 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Jan 21 17:59:58 crc kubenswrapper[4792]: I0121 17:59:58.126272 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" Jan 21 18:00:00 crc kubenswrapper[4792]: I0121 18:00:00.138907 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483640-crld8"] Jan 21 18:00:00 crc kubenswrapper[4792]: I0121 18:00:00.139785 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-crld8" Jan 21 18:00:00 crc kubenswrapper[4792]: I0121 18:00:00.141972 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 21 18:00:00 crc kubenswrapper[4792]: I0121 18:00:00.143037 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 21 18:00:00 crc kubenswrapper[4792]: I0121 18:00:00.148414 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483640-crld8"] Jan 21 18:00:00 crc kubenswrapper[4792]: I0121 18:00:00.289697 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3790b108-c0de-47f7-af45-05e506227385-secret-volume\") pod \"collect-profiles-29483640-crld8\" (UID: \"3790b108-c0de-47f7-af45-05e506227385\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-crld8" Jan 21 18:00:00 crc kubenswrapper[4792]: I0121 18:00:00.289771 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3790b108-c0de-47f7-af45-05e506227385-config-volume\") pod \"collect-profiles-29483640-crld8\" (UID: \"3790b108-c0de-47f7-af45-05e506227385\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-crld8" Jan 21 18:00:00 crc kubenswrapper[4792]: I0121 18:00:00.289817 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8x5b\" (UniqueName: \"kubernetes.io/projected/3790b108-c0de-47f7-af45-05e506227385-kube-api-access-q8x5b\") pod \"collect-profiles-29483640-crld8\" (UID: \"3790b108-c0de-47f7-af45-05e506227385\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-crld8" Jan 21 18:00:00 crc kubenswrapper[4792]: I0121 18:00:00.391148 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3790b108-c0de-47f7-af45-05e506227385-secret-volume\") pod 
\"collect-profiles-29483640-crld8\" (UID: \"3790b108-c0de-47f7-af45-05e506227385\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-crld8" Jan 21 18:00:00 crc kubenswrapper[4792]: I0121 18:00:00.391229 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3790b108-c0de-47f7-af45-05e506227385-config-volume\") pod \"collect-profiles-29483640-crld8\" (UID: \"3790b108-c0de-47f7-af45-05e506227385\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-crld8" Jan 21 18:00:00 crc kubenswrapper[4792]: I0121 18:00:00.392040 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8x5b\" (UniqueName: \"kubernetes.io/projected/3790b108-c0de-47f7-af45-05e506227385-kube-api-access-q8x5b\") pod \"collect-profiles-29483640-crld8\" (UID: \"3790b108-c0de-47f7-af45-05e506227385\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-crld8" Jan 21 18:00:00 crc kubenswrapper[4792]: I0121 18:00:00.392302 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3790b108-c0de-47f7-af45-05e506227385-config-volume\") pod \"collect-profiles-29483640-crld8\" (UID: \"3790b108-c0de-47f7-af45-05e506227385\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-crld8" Jan 21 18:00:00 crc kubenswrapper[4792]: I0121 18:00:00.397703 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3790b108-c0de-47f7-af45-05e506227385-secret-volume\") pod \"collect-profiles-29483640-crld8\" (UID: \"3790b108-c0de-47f7-af45-05e506227385\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-crld8" Jan 21 18:00:00 crc kubenswrapper[4792]: I0121 18:00:00.408492 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8x5b\" (UniqueName: \"kubernetes.io/projected/3790b108-c0de-47f7-af45-05e506227385-kube-api-access-q8x5b\") pod \"collect-profiles-29483640-crld8\" (UID: \"3790b108-c0de-47f7-af45-05e506227385\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-crld8" Jan 21 18:00:00 crc kubenswrapper[4792]: I0121 18:00:00.460408 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-crld8" Jan 21 18:00:02 crc kubenswrapper[4792]: I0121 18:00:02.399707 4792 generic.go:334] "Generic (PLEG): container finished" podID="759f2e21-e44e-4049-b262-cb49448e22ab" containerID="f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba" exitCode=0 Jan 21 18:00:02 crc kubenswrapper[4792]: I0121 18:00:02.400019 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerDied","Data":"f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba"} Jan 21 18:00:08 crc kubenswrapper[4792]: I0121 18:00:08.125241 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Jan 21 18:00:08 crc kubenswrapper[4792]: I0121 18:00:08.125698 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" Jan 21 18:00:18 crc kubenswrapper[4792]: I0121 18:00:18.125177 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Jan 21 18:00:18 crc kubenswrapper[4792]: I0121 18:00:18.126090 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" Jan 21 18:00:22 crc kubenswrapper[4792]: E0121 18:00:22.854535 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 21 18:00:22 crc kubenswrapper[4792]: E0121 18:00:22.855488 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tz58k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-2rld8_openshift-marketplace(93c3d3cc-1042-4c4d-bc8c-80360b6ca310): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 21 18:00:22 crc kubenswrapper[4792]: E0121 18:00:22.856777 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-2rld8" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" Jan 21 18:00:22 crc kubenswrapper[4792]: E0121 18:00:22.940194 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 21 18:00:22 crc kubenswrapper[4792]: E0121 18:00:22.940428 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9cvsq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-d4jqh_openshift-marketplace(106746c3-4570-4081-90ec-a6f1afd6dade): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 21 18:00:22 crc kubenswrapper[4792]: E0121 18:00:22.941658 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-d4jqh" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" Jan 21 18:00:24 crc kubenswrapper[4792]: E0121 18:00:24.573565 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-2rld8" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" Jan 21 18:00:24 crc kubenswrapper[4792]: E0121 18:00:24.574311 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-d4jqh" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" Jan 21 18:00:24 crc kubenswrapper[4792]: E0121 18:00:24.650366 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 21 18:00:24 crc kubenswrapper[4792]: E0121 18:00:24.650669 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wfvzm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-drcjx_openshift-marketplace(cc09a117-7791-479f-8477-46cd94afcc09): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 21 18:00:24 crc kubenswrapper[4792]: E0121 18:00:24.651910 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-drcjx" podUID="cc09a117-7791-479f-8477-46cd94afcc09" Jan 21 18:00:28 crc kubenswrapper[4792]: I0121 18:00:28.125495 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Jan 21 18:00:28 crc kubenswrapper[4792]: I0121 18:00:28.125574 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" Jan 21 18:00:28 crc kubenswrapper[4792]: E0121 18:00:28.236933 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-drcjx" podUID="cc09a117-7791-479f-8477-46cd94afcc09" Jan 21 18:00:28 crc kubenswrapper[4792]: E0121 18:00:28.383253 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 21 18:00:28 crc kubenswrapper[4792]: E0121 18:00:28.383440 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7dj48,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-kk56x_openshift-marketplace(3ee97d71-6a45-4d65-9242-fe34a6d15f52): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 21 18:00:28 crc kubenswrapper[4792]: E0121 18:00:28.384698 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-kk56x" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" Jan 21 18:00:30 crc kubenswrapper[4792]: E0121 18:00:30.427045 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-kk56x" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" Jan 21 18:00:30 crc kubenswrapper[4792]: E0121 18:00:30.826612 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 21 18:00:30 crc kubenswrapper[4792]: E0121 18:00:30.827893 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zkmrw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-c575g_openshift-marketplace(1018828e-7a21-4a36-83a9-c87d6aaa38c3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 21 18:00:30 crc kubenswrapper[4792]: E0121 18:00:30.829161 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-c575g" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" Jan 21 18:00:30 crc kubenswrapper[4792]: E0121 18:00:30.865397 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 21 18:00:30 crc kubenswrapper[4792]: E0121 18:00:30.865605 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zqtg2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-4kppq_openshift-marketplace(bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 21 18:00:30 crc kubenswrapper[4792]: E0121 18:00:30.867087 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-4kppq" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" Jan 21 18:00:31 crc kubenswrapper[4792]: E0121 18:00:31.029276 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 21 18:00:31 crc kubenswrapper[4792]: E0121 18:00:31.029523 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-88l7z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-l85mb_openshift-marketplace(e6f739f0-719a-4454-bbaf-b4b5c624f084): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 21 18:00:31 crc kubenswrapper[4792]: E0121 18:00:31.031031 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-l85mb" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" Jan 21 18:00:31 crc kubenswrapper[4792]: I0121 18:00:31.045208 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483640-crld8"] Jan 21 18:00:31 crc kubenswrapper[4792]: W0121 18:00:31.051437 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3790b108_c0de_47f7_af45_05e506227385.slice/crio-31a883302c8f83230d165b919352416d50ca3fee042551ec05281e63206b111f WatchSource:0}: Error finding container 31a883302c8f83230d165b919352416d50ca3fee042551ec05281e63206b111f: Status 404 returned error can't find the container with id 31a883302c8f83230d165b919352416d50ca3fee042551ec05281e63206b111f Jan 21 18:00:31 crc kubenswrapper[4792]: E0121 18:00:31.064519 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 21 18:00:31 crc kubenswrapper[4792]: E0121 18:00:31.064789 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h8l5j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-5bd5w_openshift-marketplace(c18e5f7c-5ebf-4166-82ab-c29a81232623): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 21 18:00:31 crc kubenswrapper[4792]: E0121 18:00:31.065988 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-5bd5w" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" Jan 21 18:00:31 crc kubenswrapper[4792]: I0121 18:00:31.165226 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 21 18:00:31 crc kubenswrapper[4792]: I0121 18:00:31.180274 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 21 18:00:31 crc kubenswrapper[4792]: W0121 18:00:31.224114 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod68bfd337_d05a_40cf_bcbb_6d4f44788aff.slice/crio-aaa5d645e47b3c39a15cbba122e156704b5a7ad3477e1c8d9273c769191134be WatchSource:0}: Error finding container aaa5d645e47b3c39a15cbba122e156704b5a7ad3477e1c8d9273c769191134be: Status 404 returned error can't find the container with id aaa5d645e47b3c39a15cbba122e156704b5a7ad3477e1c8d9273c769191134be Jan 21 18:00:31 crc kubenswrapper[4792]: I0121 18:00:31.567768 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"315a20c5-d12c-498e-8305-5be91162fee4","Type":"ContainerStarted","Data":"30d7c80790978440e8ac511ef3141ef102b8daebbd3e371c9f78ea4ecc185290"} Jan 21 18:00:31 crc kubenswrapper[4792]: I0121 18:00:31.570411 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerStarted","Data":"257c9e74cbb9b9f92ff3f280789fa1e4b06398d7e5151781ef0a5144cab14c51"} Jan 21 18:00:31 crc kubenswrapper[4792]: I0121 18:00:31.573324 4792 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-xpn4x" event={"ID":"1001cdf3-8e91-4e17-a8f7-a92b91daf23e","Type":"ContainerStarted","Data":"d510a19734d9f1dbea22f649c92de29cc7b96b8a2a08d985a040edfbc93a308f"} Jan 21 18:00:31 crc kubenswrapper[4792]: I0121 18:00:31.573373 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-xpn4x" Jan 21 18:00:31 crc kubenswrapper[4792]: I0121 18:00:31.574197 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Jan 21 18:00:31 crc kubenswrapper[4792]: I0121 18:00:31.574240 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" Jan 21 18:00:31 crc kubenswrapper[4792]: I0121 18:00:31.575054 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"68bfd337-d05a-40cf-bcbb-6d4f44788aff","Type":"ContainerStarted","Data":"aaa5d645e47b3c39a15cbba122e156704b5a7ad3477e1c8d9273c769191134be"} Jan 21 18:00:31 crc kubenswrapper[4792]: I0121 18:00:31.576401 4792 generic.go:334] "Generic (PLEG): container finished" podID="3790b108-c0de-47f7-af45-05e506227385" containerID="fd6a74f940f0f3eaf7cf372679f19728e66d8b68a4628f855d2a9b9cf417f597" exitCode=0 Jan 21 18:00:31 crc kubenswrapper[4792]: I0121 18:00:31.576572 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-crld8" event={"ID":"3790b108-c0de-47f7-af45-05e506227385","Type":"ContainerDied","Data":"fd6a74f940f0f3eaf7cf372679f19728e66d8b68a4628f855d2a9b9cf417f597"} Jan 21 18:00:31 crc kubenswrapper[4792]: I0121 18:00:31.576597 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-crld8" event={"ID":"3790b108-c0de-47f7-af45-05e506227385","Type":"ContainerStarted","Data":"31a883302c8f83230d165b919352416d50ca3fee042551ec05281e63206b111f"} Jan 21 18:00:31 crc kubenswrapper[4792]: E0121 18:00:31.577893 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-4kppq" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" Jan 21 18:00:31 crc kubenswrapper[4792]: E0121 18:00:31.578189 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-5bd5w" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" Jan 21 18:00:31 crc kubenswrapper[4792]: E0121 18:00:31.578200 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-c575g" 
podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" Jan 21 18:00:31 crc kubenswrapper[4792]: E0121 18:00:31.578615 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-l85mb" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" Jan 21 18:00:32 crc kubenswrapper[4792]: I0121 18:00:32.582405 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"68bfd337-d05a-40cf-bcbb-6d4f44788aff","Type":"ContainerStarted","Data":"e3cecc4790a9188aef71a635e449c158aa59227590a8c26b5324e9053fa2b9fa"} Jan 21 18:00:32 crc kubenswrapper[4792]: I0121 18:00:32.585024 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Jan 21 18:00:32 crc kubenswrapper[4792]: I0121 18:00:32.585074 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" Jan 21 18:00:32 crc kubenswrapper[4792]: I0121 18:00:32.584421 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"315a20c5-d12c-498e-8305-5be91162fee4","Type":"ContainerStarted","Data":"9f73221fe53b6cf84cb579af0c9fc7730390cf61df6933aca875867266ace9f1"} Jan 21 18:00:32 crc kubenswrapper[4792]: I0121 18:00:32.602978 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=51.602954999 podStartE2EDuration="51.602954999s" podCreationTimestamp="2026-01-21 17:59:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:00:32.599552844 +0000 UTC m=+266.581516030" watchObservedRunningTime="2026-01-21 18:00:32.602954999 +0000 UTC m=+266.584918185" Jan 21 18:00:32 crc kubenswrapper[4792]: I0121 18:00:32.618945 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=56.61892672 podStartE2EDuration="56.61892672s" podCreationTimestamp="2026-01-21 17:59:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:00:32.618775816 +0000 UTC m=+266.600739002" watchObservedRunningTime="2026-01-21 18:00:32.61892672 +0000 UTC m=+266.600889906" Jan 21 18:00:32 crc kubenswrapper[4792]: I0121 18:00:32.889044 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-crld8" Jan 21 18:00:32 crc kubenswrapper[4792]: I0121 18:00:32.974295 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3790b108-c0de-47f7-af45-05e506227385-config-volume\") pod \"3790b108-c0de-47f7-af45-05e506227385\" (UID: \"3790b108-c0de-47f7-af45-05e506227385\") " Jan 21 18:00:32 crc kubenswrapper[4792]: I0121 18:00:32.974390 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8x5b\" (UniqueName: \"kubernetes.io/projected/3790b108-c0de-47f7-af45-05e506227385-kube-api-access-q8x5b\") pod \"3790b108-c0de-47f7-af45-05e506227385\" (UID: \"3790b108-c0de-47f7-af45-05e506227385\") " Jan 21 18:00:32 crc kubenswrapper[4792]: I0121 18:00:32.975165 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3790b108-c0de-47f7-af45-05e506227385-config-volume" (OuterVolumeSpecName: "config-volume") pod "3790b108-c0de-47f7-af45-05e506227385" (UID: "3790b108-c0de-47f7-af45-05e506227385"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:00:32 crc kubenswrapper[4792]: I0121 18:00:32.975624 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3790b108-c0de-47f7-af45-05e506227385-secret-volume\") pod \"3790b108-c0de-47f7-af45-05e506227385\" (UID: \"3790b108-c0de-47f7-af45-05e506227385\") " Jan 21 18:00:32 crc kubenswrapper[4792]: I0121 18:00:32.975920 4792 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3790b108-c0de-47f7-af45-05e506227385-config-volume\") on node \"crc\" DevicePath \"\"" Jan 21 18:00:32 crc kubenswrapper[4792]: I0121 18:00:32.995164 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3790b108-c0de-47f7-af45-05e506227385-kube-api-access-q8x5b" (OuterVolumeSpecName: "kube-api-access-q8x5b") pod "3790b108-c0de-47f7-af45-05e506227385" (UID: "3790b108-c0de-47f7-af45-05e506227385"). InnerVolumeSpecName "kube-api-access-q8x5b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:00:32 crc kubenswrapper[4792]: I0121 18:00:32.995181 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3790b108-c0de-47f7-af45-05e506227385-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "3790b108-c0de-47f7-af45-05e506227385" (UID: "3790b108-c0de-47f7-af45-05e506227385"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:00:33 crc kubenswrapper[4792]: I0121 18:00:33.076878 4792 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3790b108-c0de-47f7-af45-05e506227385-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 21 18:00:33 crc kubenswrapper[4792]: I0121 18:00:33.076936 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8x5b\" (UniqueName: \"kubernetes.io/projected/3790b108-c0de-47f7-af45-05e506227385-kube-api-access-q8x5b\") on node \"crc\" DevicePath \"\"" Jan 21 18:00:33 crc kubenswrapper[4792]: I0121 18:00:33.590578 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-crld8" event={"ID":"3790b108-c0de-47f7-af45-05e506227385","Type":"ContainerDied","Data":"31a883302c8f83230d165b919352416d50ca3fee042551ec05281e63206b111f"} Jan 21 18:00:33 crc kubenswrapper[4792]: I0121 18:00:33.591011 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="31a883302c8f83230d165b919352416d50ca3fee042551ec05281e63206b111f" Jan 21 18:00:33 crc kubenswrapper[4792]: I0121 18:00:33.590753 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-crld8" Jan 21 18:00:37 crc kubenswrapper[4792]: I0121 18:00:37.614142 4792 generic.go:334] "Generic (PLEG): container finished" podID="315a20c5-d12c-498e-8305-5be91162fee4" containerID="9f73221fe53b6cf84cb579af0c9fc7730390cf61df6933aca875867266ace9f1" exitCode=0 Jan 21 18:00:37 crc kubenswrapper[4792]: I0121 18:00:37.614215 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"315a20c5-d12c-498e-8305-5be91162fee4","Type":"ContainerDied","Data":"9f73221fe53b6cf84cb579af0c9fc7730390cf61df6933aca875867266ace9f1"} Jan 21 18:00:38 crc kubenswrapper[4792]: I0121 18:00:38.124837 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Jan 21 18:00:38 crc kubenswrapper[4792]: I0121 18:00:38.125291 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" Jan 21 18:00:38 crc kubenswrapper[4792]: I0121 18:00:38.124925 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Jan 21 18:00:38 crc kubenswrapper[4792]: I0121 18:00:38.125490 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" Jan 21 18:00:38 crc kubenswrapper[4792]: I0121 18:00:38.870113 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 21 18:00:39 crc kubenswrapper[4792]: I0121 18:00:39.056641 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/315a20c5-d12c-498e-8305-5be91162fee4-kubelet-dir\") pod \"315a20c5-d12c-498e-8305-5be91162fee4\" (UID: \"315a20c5-d12c-498e-8305-5be91162fee4\") " Jan 21 18:00:39 crc kubenswrapper[4792]: I0121 18:00:39.056707 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/315a20c5-d12c-498e-8305-5be91162fee4-kube-api-access\") pod \"315a20c5-d12c-498e-8305-5be91162fee4\" (UID: \"315a20c5-d12c-498e-8305-5be91162fee4\") " Jan 21 18:00:39 crc kubenswrapper[4792]: I0121 18:00:39.056768 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/315a20c5-d12c-498e-8305-5be91162fee4-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "315a20c5-d12c-498e-8305-5be91162fee4" (UID: "315a20c5-d12c-498e-8305-5be91162fee4"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:00:39 crc kubenswrapper[4792]: I0121 18:00:39.057036 4792 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/315a20c5-d12c-498e-8305-5be91162fee4-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 21 18:00:39 crc kubenswrapper[4792]: I0121 18:00:39.061615 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/315a20c5-d12c-498e-8305-5be91162fee4-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "315a20c5-d12c-498e-8305-5be91162fee4" (UID: "315a20c5-d12c-498e-8305-5be91162fee4"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:00:39 crc kubenswrapper[4792]: I0121 18:00:39.158455 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/315a20c5-d12c-498e-8305-5be91162fee4-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 21 18:00:39 crc kubenswrapper[4792]: I0121 18:00:39.627368 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"315a20c5-d12c-498e-8305-5be91162fee4","Type":"ContainerDied","Data":"30d7c80790978440e8ac511ef3141ef102b8daebbd3e371c9f78ea4ecc185290"} Jan 21 18:00:39 crc kubenswrapper[4792]: I0121 18:00:39.627424 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30d7c80790978440e8ac511ef3141ef102b8daebbd3e371c9f78ea4ecc185290" Jan 21 18:00:39 crc kubenswrapper[4792]: I0121 18:00:39.627470 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 21 18:00:48 crc kubenswrapper[4792]: I0121 18:00:48.125500 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Jan 21 18:00:48 crc kubenswrapper[4792]: I0121 18:00:48.126141 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" Jan 21 18:00:48 crc kubenswrapper[4792]: I0121 18:00:48.125648 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Jan 21 18:00:48 crc kubenswrapper[4792]: I0121 18:00:48.126247 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" Jan 21 18:01:01 crc kubenswrapper[4792]: I0121 18:00:58.125255 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Jan 21 18:01:01 crc kubenswrapper[4792]: I0121 18:00:58.125916 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" Jan 21 18:01:01 crc kubenswrapper[4792]: I0121 18:00:58.125295 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Jan 21 18:01:01 crc kubenswrapper[4792]: I0121 18:00:58.126013 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" Jan 21 18:01:01 crc kubenswrapper[4792]: I0121 18:00:58.126074 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-xpn4x" Jan 21 18:01:01 crc kubenswrapper[4792]: I0121 18:00:58.126599 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Jan 21 18:01:01 crc kubenswrapper[4792]: I0121 18:00:58.126699 4792 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" Jan 21 18:01:01 crc kubenswrapper[4792]: I0121 18:00:58.126819 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"d510a19734d9f1dbea22f649c92de29cc7b96b8a2a08d985a040edfbc93a308f"} pod="openshift-console/downloads-7954f5f757-xpn4x" containerMessage="Container download-server failed liveness probe, will be restarted" Jan 21 18:01:01 crc kubenswrapper[4792]: I0121 18:00:58.126915 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" containerID="cri-o://d510a19734d9f1dbea22f649c92de29cc7b96b8a2a08d985a040edfbc93a308f" gracePeriod=2 Jan 21 18:01:01 crc kubenswrapper[4792]: I0121 18:01:01.804214 4792 generic.go:334] "Generic (PLEG): container finished" podID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerID="d510a19734d9f1dbea22f649c92de29cc7b96b8a2a08d985a040edfbc93a308f" exitCode=0 Jan 21 18:01:01 crc kubenswrapper[4792]: I0121 18:01:01.804393 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-xpn4x" event={"ID":"1001cdf3-8e91-4e17-a8f7-a92b91daf23e","Type":"ContainerDied","Data":"d510a19734d9f1dbea22f649c92de29cc7b96b8a2a08d985a040edfbc93a308f"} Jan 21 18:01:01 crc kubenswrapper[4792]: I0121 18:01:01.804860 4792 scope.go:117] "RemoveContainer" containerID="ad97330e98c6fbf4db1ecb521975a1e3fb03b9a40ee35bb1dc3bcb7c5d99835a" Jan 21 18:01:02 crc kubenswrapper[4792]: I0121 18:01:02.814336 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2rld8" event={"ID":"93c3d3cc-1042-4c4d-bc8c-80360b6ca310","Type":"ContainerStarted","Data":"ea5043f96a5c0f96bdd67734bfbf882cc1d36c8fec2436289e7c82d856509444"} Jan 21 18:01:03 crc kubenswrapper[4792]: I0121 18:01:03.823307 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-drcjx" event={"ID":"cc09a117-7791-479f-8477-46cd94afcc09","Type":"ContainerStarted","Data":"1a2d00816c8d9a596d8b64b1265476172ccac7f8d93040e81422922c3e3ccc82"} Jan 21 18:01:03 crc kubenswrapper[4792]: I0121 18:01:03.824983 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d4jqh" event={"ID":"106746c3-4570-4081-90ec-a6f1afd6dade","Type":"ContainerStarted","Data":"ce1309e19d5401417e1707e0e55aab57aaf4b5c25cfb581828be7dc257263dba"} Jan 21 18:01:03 crc kubenswrapper[4792]: I0121 18:01:03.826386 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-xpn4x" event={"ID":"1001cdf3-8e91-4e17-a8f7-a92b91daf23e","Type":"ContainerStarted","Data":"7073fabbba0ab0f5c9aefa7fa88d2a50632874ffebdae1b3ba929202d3869036"} Jan 21 18:01:04 crc kubenswrapper[4792]: I0121 18:01:04.834596 4792 generic.go:334] "Generic (PLEG): container finished" podID="cc09a117-7791-479f-8477-46cd94afcc09" containerID="1a2d00816c8d9a596d8b64b1265476172ccac7f8d93040e81422922c3e3ccc82" exitCode=0 Jan 21 18:01:04 crc kubenswrapper[4792]: I0121 18:01:04.834697 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-drcjx" 
event={"ID":"cc09a117-7791-479f-8477-46cd94afcc09","Type":"ContainerDied","Data":"1a2d00816c8d9a596d8b64b1265476172ccac7f8d93040e81422922c3e3ccc82"} Jan 21 18:01:04 crc kubenswrapper[4792]: I0121 18:01:04.839120 4792 generic.go:334] "Generic (PLEG): container finished" podID="106746c3-4570-4081-90ec-a6f1afd6dade" containerID="ce1309e19d5401417e1707e0e55aab57aaf4b5c25cfb581828be7dc257263dba" exitCode=0 Jan 21 18:01:04 crc kubenswrapper[4792]: I0121 18:01:04.840800 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d4jqh" event={"ID":"106746c3-4570-4081-90ec-a6f1afd6dade","Type":"ContainerDied","Data":"ce1309e19d5401417e1707e0e55aab57aaf4b5c25cfb581828be7dc257263dba"} Jan 21 18:01:05 crc kubenswrapper[4792]: I0121 18:01:05.847288 4792 generic.go:334] "Generic (PLEG): container finished" podID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" containerID="ea5043f96a5c0f96bdd67734bfbf882cc1d36c8fec2436289e7c82d856509444" exitCode=0 Jan 21 18:01:05 crc kubenswrapper[4792]: I0121 18:01:05.847394 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2rld8" event={"ID":"93c3d3cc-1042-4c4d-bc8c-80360b6ca310","Type":"ContainerDied","Data":"ea5043f96a5c0f96bdd67734bfbf882cc1d36c8fec2436289e7c82d856509444"} Jan 21 18:01:06 crc kubenswrapper[4792]: I0121 18:01:06.132790 4792 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Jan 21 18:01:06 crc kubenswrapper[4792]: I0121 18:01:06.852384 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-xpn4x" Jan 21 18:01:06 crc kubenswrapper[4792]: I0121 18:01:06.854536 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Jan 21 18:01:06 crc kubenswrapper[4792]: I0121 18:01:06.854674 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" Jan 21 18:01:07 crc kubenswrapper[4792]: I0121 18:01:07.861310 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Jan 21 18:01:07 crc kubenswrapper[4792]: I0121 18:01:07.861770 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" Jan 21 18:01:08 crc kubenswrapper[4792]: I0121 18:01:08.125470 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Jan 21 18:01:08 crc kubenswrapper[4792]: I0121 18:01:08.125613 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-xpn4x container/download-server 
namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Jan 21 18:01:08 crc kubenswrapper[4792]: I0121 18:01:08.125634 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" Jan 21 18:01:08 crc kubenswrapper[4792]: I0121 18:01:08.125552 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xpn4x" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.19:8080/\": dial tcp 10.217.0.19:8080: connect: connection refused" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.465231 4792 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 21 18:01:09 crc kubenswrapper[4792]: E0121 18:01:09.465559 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3790b108-c0de-47f7-af45-05e506227385" containerName="collect-profiles" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.465577 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3790b108-c0de-47f7-af45-05e506227385" containerName="collect-profiles" Jan 21 18:01:09 crc kubenswrapper[4792]: E0121 18:01:09.465598 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="315a20c5-d12c-498e-8305-5be91162fee4" containerName="pruner" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.465607 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="315a20c5-d12c-498e-8305-5be91162fee4" containerName="pruner" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.465725 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3790b108-c0de-47f7-af45-05e506227385" containerName="collect-profiles" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.465739 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="315a20c5-d12c-498e-8305-5be91162fee4" containerName="pruner" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.466159 4792 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.466385 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.466567 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5" gracePeriod=15 Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.466603 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63" gracePeriod=15 Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.466719 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8" gracePeriod=15 Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.466793 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc" gracePeriod=15 Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.466749 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462" gracePeriod=15 Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.472473 4792 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 21 18:01:09 crc kubenswrapper[4792]: E0121 18:01:09.472938 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.472967 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 21 18:01:09 crc kubenswrapper[4792]: E0121 18:01:09.472985 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.472995 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 21 18:01:09 crc kubenswrapper[4792]: E0121 18:01:09.473009 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.473018 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 21 18:01:09 crc kubenswrapper[4792]: E0121 18:01:09.473035 4792 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.473044 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 21 18:01:09 crc kubenswrapper[4792]: E0121 18:01:09.473057 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.473066 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 21 18:01:09 crc kubenswrapper[4792]: E0121 18:01:09.473085 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.473095 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.473244 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.473265 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.473286 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.473300 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.473311 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.509418 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.635028 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.635084 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.635108 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.635139 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.635179 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.635236 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.635257 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.635303 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.736981 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.737054 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.737104 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.737134 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.737140 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.737178 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.737154 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.737210 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.737238 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.737280 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.737306 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.737286 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.737344 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 
18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.737294 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.737390 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.737413 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.806828 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.934095 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 21 18:01:09 crc kubenswrapper[4792]: I0121 18:01:09.935973 4792 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc" exitCode=2 Jan 21 18:01:11 crc kubenswrapper[4792]: I0121 18:01:11.065631 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 21 18:01:11 crc kubenswrapper[4792]: I0121 18:01:11.066376 4792 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462" exitCode=0 Jan 21 18:01:12 crc kubenswrapper[4792]: I0121 18:01:12.076501 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 21 18:01:12 crc kubenswrapper[4792]: I0121 18:01:12.078486 4792 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63" exitCode=0 Jan 21 18:01:12 crc kubenswrapper[4792]: I0121 18:01:12.078602 4792 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8" exitCode=0 Jan 21 18:01:12 crc kubenswrapper[4792]: I0121 18:01:12.081023 4792 generic.go:334] "Generic (PLEG): container finished" podID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" containerID="e3cecc4790a9188aef71a635e449c158aa59227590a8c26b5324e9053fa2b9fa" exitCode=0 Jan 21 18:01:12 crc kubenswrapper[4792]: I0121 18:01:12.081437 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" 
event={"ID":"68bfd337-d05a-40cf-bcbb-6d4f44788aff","Type":"ContainerDied","Data":"e3cecc4790a9188aef71a635e449c158aa59227590a8c26b5324e9053fa2b9fa"} Jan 21 18:01:12 crc kubenswrapper[4792]: I0121 18:01:12.082254 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:12 crc kubenswrapper[4792]: I0121 18:01:12.082773 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:13 crc kubenswrapper[4792]: I0121 18:01:13.090057 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 21 18:01:13 crc kubenswrapper[4792]: I0121 18:01:13.090984 4792 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5" exitCode=0 Jan 21 18:01:16 crc kubenswrapper[4792]: I0121 18:01:16.250701 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:16 crc kubenswrapper[4792]: I0121 18:01:16.251766 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:18 crc kubenswrapper[4792]: I0121 18:01:18.146671 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-xpn4x" Jan 21 18:01:18 crc kubenswrapper[4792]: I0121 18:01:18.147763 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:18 crc kubenswrapper[4792]: I0121 18:01:18.148388 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:18 crc kubenswrapper[4792]: I0121 18:01:18.148692 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:18 crc kubenswrapper[4792]: E0121 18:01:18.905890 4792 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:18 crc kubenswrapper[4792]: E0121 18:01:18.906501 4792 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:18 crc kubenswrapper[4792]: E0121 18:01:18.906794 4792 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:18 crc kubenswrapper[4792]: E0121 18:01:18.907139 4792 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:18 crc kubenswrapper[4792]: E0121 18:01:18.907509 4792 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:18 crc kubenswrapper[4792]: I0121 18:01:18.907554 4792 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Jan 21 18:01:18 crc kubenswrapper[4792]: E0121 18:01:18.907882 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" interval="200ms" Jan 21 18:01:19 crc kubenswrapper[4792]: E0121 18:01:19.108646 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" interval="400ms" Jan 21 18:01:19 crc kubenswrapper[4792]: E0121 18:01:19.510768 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" interval="800ms" Jan 21 18:01:20 crc kubenswrapper[4792]: E0121 18:01:20.311680 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" interval="1.6s" Jan 21 18:01:21 crc kubenswrapper[4792]: E0121 18:01:21.913953 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" interval="3.2s" Jan 21 18:01:25 crc kubenswrapper[4792]: E0121 18:01:25.115484 4792 
controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" interval="6.4s" Jan 21 18:01:26 crc kubenswrapper[4792]: I0121 18:01:26.252841 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:26 crc kubenswrapper[4792]: I0121 18:01:26.253572 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:26 crc kubenswrapper[4792]: I0121 18:01:26.254459 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:27 crc kubenswrapper[4792]: I0121 18:01:27.221907 4792 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Readiness probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Jan 21 18:01:27 crc kubenswrapper[4792]: I0121 18:01:27.221971 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Jan 21 18:01:27 crc kubenswrapper[4792]: E0121 18:01:27.222418 4792 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/events\": dial tcp 38.129.56.68:6443: connect: connection refused" event=< Jan 21 18:01:27 crc kubenswrapper[4792]: &Event{ObjectMeta:{kube-controller-manager-crc.188cd0f103f6a1e8 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager},},Reason:ProbeError,Message:Readiness probe error: Get "https://192.168.126.11:10257/healthz": dial tcp 192.168.126.11:10257: connect: connection refused Jan 21 18:01:27 crc kubenswrapper[4792]: body: Jan 21 18:01:27 crc kubenswrapper[4792]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-21 18:01:27.221953 +0000 UTC m=+321.203916176,LastTimestamp:2026-01-21 18:01:27.221953 +0000 UTC m=+321.203916176,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Jan 21 18:01:27 crc 
kubenswrapper[4792]: > Jan 21 18:01:27 crc kubenswrapper[4792]: I0121 18:01:27.469477 4792 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Liveness probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Jan 21 18:01:27 crc kubenswrapper[4792]: I0121 18:01:27.469544 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Jan 21 18:01:31 crc kubenswrapper[4792]: E0121 18:01:31.518070 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" interval="7s" Jan 21 18:01:33 crc kubenswrapper[4792]: I0121 18:01:33.235393 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 21 18:01:33 crc kubenswrapper[4792]: I0121 18:01:33.235497 4792 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66" exitCode=1 Jan 21 18:01:33 crc kubenswrapper[4792]: I0121 18:01:33.235554 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66"} Jan 21 18:01:33 crc kubenswrapper[4792]: I0121 18:01:33.236305 4792 scope.go:117] "RemoveContainer" containerID="668ec7950eaf6dfd1b10085ecd687e793ab68423a57084ad55ec69bb904f9c66" Jan 21 18:01:33 crc kubenswrapper[4792]: I0121 18:01:33.236526 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:33 crc kubenswrapper[4792]: I0121 18:01:33.236891 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:33 crc kubenswrapper[4792]: I0121 18:01:33.237079 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:33 crc kubenswrapper[4792]: I0121 18:01:33.237499 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:33 crc kubenswrapper[4792]: E0121 18:01:33.629051 4792 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/events\": dial tcp 38.129.56.68:6443: connect: connection refused" event=< Jan 21 18:01:33 crc kubenswrapper[4792]: &Event{ObjectMeta:{kube-controller-manager-crc.188cd0f103f6a1e8 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager},},Reason:ProbeError,Message:Readiness probe error: Get "https://192.168.126.11:10257/healthz": dial tcp 192.168.126.11:10257: connect: connection refused Jan 21 18:01:33 crc kubenswrapper[4792]: body: Jan 21 18:01:33 crc kubenswrapper[4792]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-21 18:01:27.221953 +0000 UTC m=+321.203916176,LastTimestamp:2026-01-21 18:01:27.221953 +0000 UTC m=+321.203916176,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Jan 21 18:01:33 crc kubenswrapper[4792]: > Jan 21 18:01:36 crc kubenswrapper[4792]: I0121 18:01:36.253446 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:36 crc kubenswrapper[4792]: I0121 18:01:36.254713 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:36 crc kubenswrapper[4792]: I0121 18:01:36.255445 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:36 crc kubenswrapper[4792]: I0121 18:01:36.255866 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:37 crc kubenswrapper[4792]: I0121 18:01:37.220830 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 18:01:37 crc kubenswrapper[4792]: I0121 18:01:37.467970 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 18:01:38 crc kubenswrapper[4792]: I0121 18:01:38.048011 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 18:01:38 crc kubenswrapper[4792]: E0121 18:01:38.520759 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" interval="7s" Jan 21 18:01:41 crc kubenswrapper[4792]: I0121 18:01:41.648728 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 21 18:01:41 crc kubenswrapper[4792]: I0121 18:01:41.650071 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:41 crc kubenswrapper[4792]: I0121 18:01:41.650434 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:41 crc kubenswrapper[4792]: I0121 18:01:41.650932 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:41 crc kubenswrapper[4792]: I0121 18:01:41.651223 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:41 crc kubenswrapper[4792]: I0121 18:01:41.684155 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/68bfd337-d05a-40cf-bcbb-6d4f44788aff-kubelet-dir\") pod \"68bfd337-d05a-40cf-bcbb-6d4f44788aff\" (UID: \"68bfd337-d05a-40cf-bcbb-6d4f44788aff\") " Jan 21 18:01:41 crc kubenswrapper[4792]: I0121 18:01:41.684321 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/68bfd337-d05a-40cf-bcbb-6d4f44788aff-var-lock\") pod \"68bfd337-d05a-40cf-bcbb-6d4f44788aff\" (UID: \"68bfd337-d05a-40cf-bcbb-6d4f44788aff\") " Jan 21 18:01:41 crc kubenswrapper[4792]: I0121 18:01:41.684366 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/68bfd337-d05a-40cf-bcbb-6d4f44788aff-kube-api-access\") pod \"68bfd337-d05a-40cf-bcbb-6d4f44788aff\" (UID: \"68bfd337-d05a-40cf-bcbb-6d4f44788aff\") " Jan 21 18:01:41 crc kubenswrapper[4792]: I0121 18:01:41.685956 4792 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/68bfd337-d05a-40cf-bcbb-6d4f44788aff-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "68bfd337-d05a-40cf-bcbb-6d4f44788aff" (UID: "68bfd337-d05a-40cf-bcbb-6d4f44788aff"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:01:41 crc kubenswrapper[4792]: I0121 18:01:41.686067 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/68bfd337-d05a-40cf-bcbb-6d4f44788aff-var-lock" (OuterVolumeSpecName: "var-lock") pod "68bfd337-d05a-40cf-bcbb-6d4f44788aff" (UID: "68bfd337-d05a-40cf-bcbb-6d4f44788aff"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:01:41 crc kubenswrapper[4792]: I0121 18:01:41.691589 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68bfd337-d05a-40cf-bcbb-6d4f44788aff-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "68bfd337-d05a-40cf-bcbb-6d4f44788aff" (UID: "68bfd337-d05a-40cf-bcbb-6d4f44788aff"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:01:41 crc kubenswrapper[4792]: I0121 18:01:41.786491 4792 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/68bfd337-d05a-40cf-bcbb-6d4f44788aff-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 21 18:01:41 crc kubenswrapper[4792]: I0121 18:01:41.786551 4792 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/68bfd337-d05a-40cf-bcbb-6d4f44788aff-var-lock\") on node \"crc\" DevicePath \"\"" Jan 21 18:01:41 crc kubenswrapper[4792]: I0121 18:01:41.786563 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/68bfd337-d05a-40cf-bcbb-6d4f44788aff-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 21 18:01:42 crc kubenswrapper[4792]: I0121 18:01:42.293268 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"68bfd337-d05a-40cf-bcbb-6d4f44788aff","Type":"ContainerDied","Data":"aaa5d645e47b3c39a15cbba122e156704b5a7ad3477e1c8d9273c769191134be"} Jan 21 18:01:42 crc kubenswrapper[4792]: I0121 18:01:42.293697 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aaa5d645e47b3c39a15cbba122e156704b5a7ad3477e1c8d9273c769191134be" Jan 21 18:01:42 crc kubenswrapper[4792]: I0121 18:01:42.293368 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 21 18:01:42 crc kubenswrapper[4792]: I0121 18:01:42.297255 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:42 crc kubenswrapper[4792]: I0121 18:01:42.297509 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:42 crc kubenswrapper[4792]: I0121 18:01:42.297731 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:42 crc kubenswrapper[4792]: I0121 18:01:42.297981 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:43 crc kubenswrapper[4792]: E0121 18:01:43.630818 4792 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/events\": dial tcp 38.129.56.68:6443: connect: connection refused" event=< Jan 21 18:01:43 crc kubenswrapper[4792]: &Event{ObjectMeta:{kube-controller-manager-crc.188cd0f103f6a1e8 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager},},Reason:ProbeError,Message:Readiness probe error: Get "https://192.168.126.11:10257/healthz": dial tcp 192.168.126.11:10257: connect: connection refused Jan 21 18:01:43 crc kubenswrapper[4792]: body: Jan 21 18:01:43 crc kubenswrapper[4792]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-21 18:01:27.221953 +0000 UTC m=+321.203916176,LastTimestamp:2026-01-21 18:01:27.221953 +0000 UTC m=+321.203916176,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Jan 21 18:01:43 crc kubenswrapper[4792]: > Jan 21 18:01:45 crc kubenswrapper[4792]: E0121 18:01:45.521964 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" interval="7s" Jan 21 18:01:46 crc kubenswrapper[4792]: I0121 18:01:46.249002 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:46 crc kubenswrapper[4792]: I0121 18:01:46.249425 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:46 crc kubenswrapper[4792]: I0121 18:01:46.249711 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:46 crc kubenswrapper[4792]: I0121 18:01:46.249997 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:52 crc kubenswrapper[4792]: E0121 18:01:52.523197 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" interval="7s" Jan 21 18:01:53 crc kubenswrapper[4792]: E0121 18:01:53.631899 4792 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/events\": dial tcp 38.129.56.68:6443: connect: connection refused" event=< Jan 21 18:01:53 crc kubenswrapper[4792]: &Event{ObjectMeta:{kube-controller-manager-crc.188cd0f103f6a1e8 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager},},Reason:ProbeError,Message:Readiness probe error: Get "https://192.168.126.11:10257/healthz": dial tcp 192.168.126.11:10257: connect: connection refused Jan 21 18:01:53 crc kubenswrapper[4792]: body: Jan 21 18:01:53 crc kubenswrapper[4792]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-21 18:01:27.221953 +0000 UTC m=+321.203916176,LastTimestamp:2026-01-21 18:01:27.221953 +0000 UTC m=+321.203916176,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Jan 21 18:01:53 crc kubenswrapper[4792]: > Jan 21 18:01:56 crc kubenswrapper[4792]: I0121 18:01:56.259162 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 
18:01:56 crc kubenswrapper[4792]: I0121 18:01:56.261667 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:56 crc kubenswrapper[4792]: I0121 18:01:56.262205 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:56 crc kubenswrapper[4792]: I0121 18:01:56.262553 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:01:59 crc kubenswrapper[4792]: E0121 18:01:59.525513 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" interval="7s" Jan 21 18:02:00 crc kubenswrapper[4792]: I0121 18:02:00.405339 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-node-identity_network-node-identity-vrzqb_ef543e1b-8068-4ea3-b32a-61027b32e95d/approver/0.log" Jan 21 18:02:00 crc kubenswrapper[4792]: I0121 18:02:00.405697 4792 generic.go:334] "Generic (PLEG): container finished" podID="ef543e1b-8068-4ea3-b32a-61027b32e95d" containerID="4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e" exitCode=1 Jan 21 18:02:00 crc kubenswrapper[4792]: I0121 18:02:00.405731 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerDied","Data":"4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e"} Jan 21 18:02:00 crc kubenswrapper[4792]: I0121 18:02:00.406321 4792 scope.go:117] "RemoveContainer" containerID="4fce702cb4fdeffe9e5fa1ba48f84e1486d04aa1e76b567af61d1bb226c6ec3e" Jan 21 18:02:00 crc kubenswrapper[4792]: I0121 18:02:00.406505 4792 status_manager.go:851] "Failed to get status for pod" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:00 crc kubenswrapper[4792]: I0121 18:02:00.406767 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:00 crc kubenswrapper[4792]: I0121 18:02:00.407039 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:00 crc kubenswrapper[4792]: I0121 18:02:00.407266 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:00 crc kubenswrapper[4792]: I0121 18:02:00.407500 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:01 crc kubenswrapper[4792]: I0121 18:02:01.419300 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 21 18:02:01 crc kubenswrapper[4792]: I0121 18:02:01.420511 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f4005d09501fc495240993d1765b6c8c94ab14abf2b800843d2f43d54ca65055" Jan 21 18:02:01 crc kubenswrapper[4792]: I0121 18:02:01.422893 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 21 18:02:01 crc kubenswrapper[4792]: I0121 18:02:01.423540 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 18:02:01 crc kubenswrapper[4792]: I0121 18:02:01.424935 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:01 crc kubenswrapper[4792]: I0121 18:02:01.425648 4792 status_manager.go:851] "Failed to get status for pod" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:01 crc kubenswrapper[4792]: I0121 18:02:01.427313 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:01 crc kubenswrapper[4792]: I0121 18:02:01.427817 4792 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:01 crc kubenswrapper[4792]: I0121 18:02:01.428286 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:01 crc kubenswrapper[4792]: I0121 18:02:01.428457 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:01 crc kubenswrapper[4792]: W0121 18:02:01.466260 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-6674a9f19ce73d9089b71dccb676f7a729b06da635a7617286fe35247909c509 WatchSource:0}: Error finding container 6674a9f19ce73d9089b71dccb676f7a729b06da635a7617286fe35247909c509: Status 404 returned error can't find the container with id 6674a9f19ce73d9089b71dccb676f7a729b06da635a7617286fe35247909c509 Jan 21 18:02:01 crc kubenswrapper[4792]: I0121 18:02:01.589461 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 21 18:02:01 crc kubenswrapper[4792]: I0121 18:02:01.590026 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 21 18:02:01 crc kubenswrapper[4792]: I0121 18:02:01.589626 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:02:01 crc kubenswrapper[4792]: I0121 18:02:01.590077 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 21 18:02:01 crc kubenswrapper[4792]: I0121 18:02:01.590100 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:02:01 crc kubenswrapper[4792]: I0121 18:02:01.590222 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:02:01 crc kubenswrapper[4792]: I0121 18:02:01.590493 4792 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 21 18:02:01 crc kubenswrapper[4792]: I0121 18:02:01.590513 4792 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 21 18:02:01 crc kubenswrapper[4792]: I0121 18:02:01.590526 4792 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.255694 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.428208 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5bd5w" event={"ID":"c18e5f7c-5ebf-4166-82ab-c29a81232623","Type":"ContainerStarted","Data":"2328d494197e3a18e7e6f4ac107701095842e8b3739b9fcbdf8cb923618ce69a"} Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.429660 4792 status_manager.go:851] "Failed to get status for pod" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" pod="openshift-marketplace/community-operators-5bd5w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-5bd5w\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.429888 4792 
status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.430090 4792 status_manager.go:851] "Failed to get status for pod" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.430287 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.430470 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.430651 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.432999 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"6674a9f19ce73d9089b71dccb676f7a729b06da635a7617286fe35247909c509"} Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.435170 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c575g" event={"ID":"1018828e-7a21-4a36-83a9-c87d6aaa38c3","Type":"ContainerStarted","Data":"097ce796343fcfa6a82768d22ef8e67f82cd3ea746c910a1f9fc14b43421a07f"} Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.436191 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.436748 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.437638 4792 status_manager.go:851] "Failed to get status for pod" 
podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" pod="openshift-marketplace/community-operators-5bd5w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-5bd5w\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.438647 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-node-identity_network-node-identity-vrzqb_ef543e1b-8068-4ea3-b32a-61027b32e95d/approver/0.log" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.439108 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.439471 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"1239b37db509a34aa231bc63cfd73324f8a08b2fa752c99f6fe58f408acd3540"} Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.439527 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.440625 4792 status_manager.go:851] "Failed to get status for pod" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.441002 4792 status_manager.go:851] "Failed to get status for pod" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" pod="openshift-marketplace/community-operators-c575g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-c575g\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.441218 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.441776 4792 status_manager.go:851] "Failed to get status for pod" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" pod="openshift-marketplace/community-operators-5bd5w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-5bd5w\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.442099 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 
18:02:02.442489 4792 status_manager.go:851] "Failed to get status for pod" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.442762 4792 status_manager.go:851] "Failed to get status for pod" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" pod="openshift-marketplace/community-operators-c575g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-c575g\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.443650 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.443936 4792 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.444206 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.444442 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.444725 4792 status_manager.go:851] "Failed to get status for pod" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.445054 4792 status_manager.go:851] "Failed to get status for pod" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" pod="openshift-marketplace/community-operators-c575g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-c575g\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.445290 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection 
refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.445515 4792 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.445746 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.445999 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.446233 4792 status_manager.go:851] "Failed to get status for pod" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" pod="openshift-marketplace/community-operators-5bd5w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-5bd5w\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:02 crc kubenswrapper[4792]: I0121 18:02:02.446464 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:03 crc kubenswrapper[4792]: E0121 18:02:03.633205 4792 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/events\": dial tcp 38.129.56.68:6443: connect: connection refused" event=< Jan 21 18:02:03 crc kubenswrapper[4792]: &Event{ObjectMeta:{kube-controller-manager-crc.188cd0f103f6a1e8 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager},},Reason:ProbeError,Message:Readiness probe error: Get "https://192.168.126.11:10257/healthz": dial tcp 192.168.126.11:10257: connect: connection refused Jan 21 18:02:03 crc kubenswrapper[4792]: body: Jan 21 18:02:03 crc kubenswrapper[4792]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-21 18:01:27.221953 +0000 UTC m=+321.203916176,LastTimestamp:2026-01-21 18:01:27.221953 +0000 UTC m=+321.203916176,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Jan 21 18:02:03 crc kubenswrapper[4792]: > Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.690504 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" 
event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"5ba01c5f975d9904ab03c08ba78e1d6a0cb8ad3cde1c0351385a6001fc108b50"} Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.693212 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l85mb" event={"ID":"e6f739f0-719a-4454-bbaf-b4b5c624f084","Type":"ContainerStarted","Data":"19750ad0531ae71c16142ee722088155a15e7029bcc15eafa8ebf4db1d096d8c"} Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.696513 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kk56x" event={"ID":"3ee97d71-6a45-4d65-9242-fe34a6d15f52","Type":"ContainerStarted","Data":"e74421d5a0dbe762e0b601480e8c027226aeb6d0f66ac076882c3fd43634061e"} Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.700655 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.701147 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9a9d9a7a29fe16ac4d092aed84eadfb10376435956e06135740be067a1253a67"} Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.701721 4792 status_manager.go:851] "Failed to get status for pod" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" pod="openshift-marketplace/community-operators-5bd5w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-5bd5w\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.702100 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.703146 4792 status_manager.go:851] "Failed to get status for pod" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.703459 4792 status_manager.go:851] "Failed to get status for pod" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" pod="openshift-marketplace/community-operators-c575g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-c575g\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.705706 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.706469 4792 status_manager.go:851] "Failed to get status for pod" 
podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.706772 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-drcjx" event={"ID":"cc09a117-7791-479f-8477-46cd94afcc09","Type":"ContainerStarted","Data":"228a9e14f5c9c643cf4e9ebb7486d726a2ed1621ea7cc2ae0c732f0fc5476bd2"} Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.706973 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.708769 4792 generic.go:334] "Generic (PLEG): container finished" podID="c18e5f7c-5ebf-4166-82ab-c29a81232623" containerID="2328d494197e3a18e7e6f4ac107701095842e8b3739b9fcbdf8cb923618ce69a" exitCode=0 Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.708867 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5bd5w" event={"ID":"c18e5f7c-5ebf-4166-82ab-c29a81232623","Type":"ContainerDied","Data":"2328d494197e3a18e7e6f4ac107701095842e8b3739b9fcbdf8cb923618ce69a"} Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.710515 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.710794 4792 status_manager.go:851] "Failed to get status for pod" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" pod="openshift-marketplace/community-operators-5bd5w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-5bd5w\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.711164 4792 status_manager.go:851] "Failed to get status for pod" podUID="cc09a117-7791-479f-8477-46cd94afcc09" pod="openshift-marketplace/certified-operators-drcjx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-drcjx\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.712048 4792 status_manager.go:851] "Failed to get status for pod" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.712392 4792 status_manager.go:851] "Failed to get status for pod" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" pod="openshift-marketplace/community-operators-c575g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-c575g\": 
dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.713137 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d4jqh" event={"ID":"106746c3-4570-4081-90ec-a6f1afd6dade","Type":"ContainerStarted","Data":"7be40c72c549b12d3b6ea6b84125bb04e6a21170fbc5bc2cd26f187c872e2e75"} Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.713173 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.714835 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.715383 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.716145 4792 generic.go:334] "Generic (PLEG): container finished" podID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" containerID="097ce796343fcfa6a82768d22ef8e67f82cd3ea746c910a1f9fc14b43421a07f" exitCode=0 Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.716199 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c575g" event={"ID":"1018828e-7a21-4a36-83a9-c87d6aaa38c3","Type":"ContainerDied","Data":"097ce796343fcfa6a82768d22ef8e67f82cd3ea746c910a1f9fc14b43421a07f"} Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.717402 4792 status_manager.go:851] "Failed to get status for pod" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" pod="openshift-marketplace/community-operators-c575g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-c575g\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.717910 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.718536 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2rld8" event={"ID":"93c3d3cc-1042-4c4d-bc8c-80360b6ca310","Type":"ContainerStarted","Data":"a43cb8e48612c0400b3e8c544b205714aaaa46c2ca73d16ba49e38d53a256f0a"} Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.718704 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.719057 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.719331 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.719582 4792 status_manager.go:851] "Failed to get status for pod" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" pod="openshift-marketplace/community-operators-5bd5w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-5bd5w\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.719737 4792 status_manager.go:851] "Failed to get status for pod" podUID="cc09a117-7791-479f-8477-46cd94afcc09" pod="openshift-marketplace/certified-operators-drcjx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-drcjx\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.720074 4792 status_manager.go:851] "Failed to get status for pod" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.720423 4792 status_manager.go:851] "Failed to get status for pod" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" pod="openshift-marketplace/redhat-operators-d4jqh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d4jqh\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.720815 4792 status_manager.go:851] "Failed to get status for pod" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" pod="openshift-marketplace/redhat-operators-2rld8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2rld8\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.721034 4792 status_manager.go:851] "Failed to get status for pod" podUID="cc09a117-7791-479f-8477-46cd94afcc09" pod="openshift-marketplace/certified-operators-drcjx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-drcjx\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.722255 4792 status_manager.go:851] "Failed to get status for pod" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.722337 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4kppq" event={"ID":"bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd","Type":"ContainerStarted","Data":"3c6a19b2e18dd1c62329861ffb1df28161f3ff4bf8b723838a677fd954974096"} Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.723010 4792 status_manager.go:851] "Failed to get status for pod" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" pod="openshift-marketplace/redhat-operators-d4jqh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d4jqh\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.723325 4792 status_manager.go:851] "Failed to get status for pod" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" pod="openshift-marketplace/community-operators-c575g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-c575g\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.723700 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.724080 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.724444 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.724772 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:04 crc kubenswrapper[4792]: I0121 18:02:04.725100 4792 status_manager.go:851] "Failed to get status for pod" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" pod="openshift-marketplace/community-operators-5bd5w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-5bd5w\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:05 crc kubenswrapper[4792]: E0121 18:02:05.307136 4792 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC 
openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.129.56.68:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" volumeName="registry-storage" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.323341 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-d4jqh" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.323456 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-d4jqh" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.699374 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2rld8" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.699988 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2rld8" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.755128 4792 generic.go:334] "Generic (PLEG): container finished" podID="e6f739f0-719a-4454-bbaf-b4b5c624f084" containerID="19750ad0531ae71c16142ee722088155a15e7029bcc15eafa8ebf4db1d096d8c" exitCode=0 Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.755210 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l85mb" event={"ID":"e6f739f0-719a-4454-bbaf-b4b5c624f084","Type":"ContainerDied","Data":"19750ad0531ae71c16142ee722088155a15e7029bcc15eafa8ebf4db1d096d8c"} Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.756259 4792 status_manager.go:851] "Failed to get status for pod" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" pod="openshift-marketplace/redhat-marketplace-l85mb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-l85mb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.756682 4792 status_manager.go:851] "Failed to get status for pod" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.757768 4792 status_manager.go:851] "Failed to get status for pod" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" pod="openshift-marketplace/redhat-operators-d4jqh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d4jqh\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.758157 4792 status_manager.go:851] "Failed to get status for pod" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" pod="openshift-marketplace/community-operators-c575g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-c575g\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.758629 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.758832 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.759060 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.759333 4792 status_manager.go:851] "Failed to get status for pod" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" pod="openshift-marketplace/community-operators-5bd5w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-5bd5w\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.759496 4792 generic.go:334] "Generic (PLEG): container finished" podID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" containerID="e74421d5a0dbe762e0b601480e8c027226aeb6d0f66ac076882c3fd43634061e" exitCode=0 Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.759668 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kk56x" event={"ID":"3ee97d71-6a45-4d65-9242-fe34a6d15f52","Type":"ContainerDied","Data":"e74421d5a0dbe762e0b601480e8c027226aeb6d0f66ac076882c3fd43634061e"} Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.759783 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.764593 4792 status_manager.go:851] "Failed to get status for pod" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" pod="openshift-marketplace/redhat-operators-2rld8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2rld8\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.765026 4792 status_manager.go:851] "Failed to get status for pod" podUID="cc09a117-7791-479f-8477-46cd94afcc09" pod="openshift-marketplace/certified-operators-drcjx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-drcjx\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.765416 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:05 
crc kubenswrapper[4792]: I0121 18:02:05.765592 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.765751 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.765939 4792 status_manager.go:851] "Failed to get status for pod" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" pod="openshift-marketplace/redhat-marketplace-kk56x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kk56x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.766093 4792 status_manager.go:851] "Failed to get status for pod" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" pod="openshift-marketplace/community-operators-5bd5w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-5bd5w\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.766289 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.766468 4792 status_manager.go:851] "Failed to get status for pod" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" pod="openshift-marketplace/redhat-operators-2rld8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2rld8\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.766631 4792 status_manager.go:851] "Failed to get status for pod" podUID="cc09a117-7791-479f-8477-46cd94afcc09" pod="openshift-marketplace/certified-operators-drcjx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-drcjx\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.766785 4792 status_manager.go:851] "Failed to get status for pod" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" pod="openshift-marketplace/certified-operators-4kppq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4kppq\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.766950 4792 status_manager.go:851] "Failed to get status for pod" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" pod="openshift-marketplace/redhat-marketplace-l85mb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-l85mb\": dial tcp 38.129.56.68:6443: 
connect: connection refused" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.767096 4792 status_manager.go:851] "Failed to get status for pod" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.767259 4792 status_manager.go:851] "Failed to get status for pod" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" pod="openshift-marketplace/redhat-operators-d4jqh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d4jqh\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:05 crc kubenswrapper[4792]: I0121 18:02:05.767429 4792 status_manager.go:851] "Failed to get status for pod" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" pod="openshift-marketplace/community-operators-c575g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-c575g\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.249540 4792 status_manager.go:851] "Failed to get status for pod" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" pod="openshift-marketplace/redhat-marketplace-kk56x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kk56x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.250394 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.250968 4792 status_manager.go:851] "Failed to get status for pod" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" pod="openshift-marketplace/community-operators-5bd5w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-5bd5w\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.251193 4792 status_manager.go:851] "Failed to get status for pod" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" pod="openshift-marketplace/redhat-operators-2rld8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2rld8\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.251410 4792 status_manager.go:851] "Failed to get status for pod" podUID="cc09a117-7791-479f-8477-46cd94afcc09" pod="openshift-marketplace/certified-operators-drcjx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-drcjx\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.251721 4792 status_manager.go:851] "Failed to get status for pod" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" pod="openshift-marketplace/certified-operators-4kppq" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4kppq\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.252207 4792 status_manager.go:851] "Failed to get status for pod" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" pod="openshift-marketplace/redhat-marketplace-l85mb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-l85mb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.252604 4792 status_manager.go:851] "Failed to get status for pod" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.253075 4792 status_manager.go:851] "Failed to get status for pod" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" pod="openshift-marketplace/redhat-operators-d4jqh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d4jqh\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.253295 4792 status_manager.go:851] "Failed to get status for pod" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" pod="openshift-marketplace/community-operators-c575g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-c575g\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.253707 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.254755 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.258065 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: E0121 18:02:06.528601 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" interval="7s" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.718399 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-d4jqh" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" containerName="registry-server" 
probeResult="failure" output=< Jan 21 18:02:06 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s Jan 21 18:02:06 crc kubenswrapper[4792]: > Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.756818 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-2rld8" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" containerName="registry-server" probeResult="failure" output=< Jan 21 18:02:06 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s Jan 21 18:02:06 crc kubenswrapper[4792]: > Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.768572 4792 generic.go:334] "Generic (PLEG): container finished" podID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" containerID="3c6a19b2e18dd1c62329861ffb1df28161f3ff4bf8b723838a677fd954974096" exitCode=0 Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.768616 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4kppq" event={"ID":"bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd","Type":"ContainerDied","Data":"3c6a19b2e18dd1c62329861ffb1df28161f3ff4bf8b723838a677fd954974096"} Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.770948 4792 status_manager.go:851] "Failed to get status for pod" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" pod="openshift-marketplace/redhat-marketplace-kk56x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kk56x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.772467 4792 status_manager.go:851] "Failed to get status for pod" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" pod="openshift-marketplace/community-operators-5bd5w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-5bd5w\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.773000 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.773269 4792 status_manager.go:851] "Failed to get status for pod" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" pod="openshift-marketplace/redhat-operators-2rld8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2rld8\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.773502 4792 status_manager.go:851] "Failed to get status for pod" podUID="cc09a117-7791-479f-8477-46cd94afcc09" pod="openshift-marketplace/certified-operators-drcjx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-drcjx\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.773766 4792 status_manager.go:851] "Failed to get status for pod" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" pod="openshift-marketplace/certified-operators-4kppq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4kppq\": dial tcp 38.129.56.68:6443: connect: connection 
refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.774103 4792 status_manager.go:851] "Failed to get status for pod" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" pod="openshift-marketplace/redhat-marketplace-l85mb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-l85mb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.774312 4792 status_manager.go:851] "Failed to get status for pod" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" pod="openshift-marketplace/redhat-operators-d4jqh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d4jqh\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.774487 4792 status_manager.go:851] "Failed to get status for pod" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.774662 4792 status_manager.go:851] "Failed to get status for pod" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" pod="openshift-marketplace/community-operators-c575g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-c575g\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.774831 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.775029 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:06 crc kubenswrapper[4792]: I0121 18:02:06.775233 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:07 crc kubenswrapper[4792]: I0121 18:02:07.220978 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 18:02:07 crc kubenswrapper[4792]: I0121 18:02:07.778329 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c575g" event={"ID":"1018828e-7a21-4a36-83a9-c87d6aaa38c3","Type":"ContainerStarted","Data":"b6584bed35fdc8e912f8a486a70ff330f77dd4411792d788dff8f09e34010af9"} Jan 21 18:02:07 crc kubenswrapper[4792]: I0121 18:02:07.779471 4792 status_manager.go:851] "Failed to get status for pod" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" pod="openshift-marketplace/redhat-operators-2rld8" 
err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2rld8\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:07 crc kubenswrapper[4792]: I0121 18:02:07.779780 4792 status_manager.go:851] "Failed to get status for pod" podUID="cc09a117-7791-479f-8477-46cd94afcc09" pod="openshift-marketplace/certified-operators-drcjx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-drcjx\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:07 crc kubenswrapper[4792]: I0121 18:02:07.780258 4792 status_manager.go:851] "Failed to get status for pod" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" pod="openshift-marketplace/certified-operators-4kppq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4kppq\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:07 crc kubenswrapper[4792]: I0121 18:02:07.780506 4792 status_manager.go:851] "Failed to get status for pod" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" pod="openshift-marketplace/redhat-marketplace-l85mb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-l85mb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:07 crc kubenswrapper[4792]: I0121 18:02:07.780656 4792 status_manager.go:851] "Failed to get status for pod" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" pod="openshift-marketplace/redhat-operators-d4jqh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d4jqh\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:07 crc kubenswrapper[4792]: I0121 18:02:07.780820 4792 status_manager.go:851] "Failed to get status for pod" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:07 crc kubenswrapper[4792]: I0121 18:02:07.781025 4792 status_manager.go:851] "Failed to get status for pod" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" pod="openshift-marketplace/community-operators-c575g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-c575g\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:07 crc kubenswrapper[4792]: I0121 18:02:07.781196 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:07 crc kubenswrapper[4792]: I0121 18:02:07.781371 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:07 crc kubenswrapper[4792]: I0121 18:02:07.781539 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" 
pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:07 crc kubenswrapper[4792]: I0121 18:02:07.781739 4792 status_manager.go:851] "Failed to get status for pod" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" pod="openshift-marketplace/redhat-marketplace-kk56x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kk56x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:07 crc kubenswrapper[4792]: I0121 18:02:07.781899 4792 status_manager.go:851] "Failed to get status for pod" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" pod="openshift-marketplace/community-operators-5bd5w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-5bd5w\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:07 crc kubenswrapper[4792]: I0121 18:02:07.782045 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.048185 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.053961 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.054798 4792 status_manager.go:851] "Failed to get status for pod" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" pod="openshift-marketplace/redhat-operators-2rld8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2rld8\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.055293 4792 status_manager.go:851] "Failed to get status for pod" podUID="cc09a117-7791-479f-8477-46cd94afcc09" pod="openshift-marketplace/certified-operators-drcjx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-drcjx\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.055500 4792 status_manager.go:851] "Failed to get status for pod" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" pod="openshift-marketplace/certified-operators-4kppq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4kppq\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.055685 4792 status_manager.go:851] "Failed to get status for pod" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" pod="openshift-marketplace/redhat-marketplace-l85mb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-l85mb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.055925 4792 status_manager.go:851] "Failed to get status for pod" 
podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.056109 4792 status_manager.go:851] "Failed to get status for pod" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" pod="openshift-marketplace/redhat-operators-d4jqh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d4jqh\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.056473 4792 status_manager.go:851] "Failed to get status for pod" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" pod="openshift-marketplace/community-operators-c575g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-c575g\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.056994 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.057453 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.057767 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.058164 4792 status_manager.go:851] "Failed to get status for pod" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" pod="openshift-marketplace/redhat-marketplace-kk56x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kk56x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.058404 4792 status_manager.go:851] "Failed to get status for pod" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" pod="openshift-marketplace/community-operators-5bd5w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-5bd5w\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.058708 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.784913 
4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5bd5w" event={"ID":"c18e5f7c-5ebf-4166-82ab-c29a81232623","Type":"ContainerStarted","Data":"9386dce00c5baa1e699a4bf4e33c2aecdf1530639509a490228058a6f8dd8011"} Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.786034 4792 status_manager.go:851] "Failed to get status for pod" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" pod="openshift-marketplace/redhat-operators-2rld8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2rld8\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.786284 4792 status_manager.go:851] "Failed to get status for pod" podUID="cc09a117-7791-479f-8477-46cd94afcc09" pod="openshift-marketplace/certified-operators-drcjx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-drcjx\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.786457 4792 status_manager.go:851] "Failed to get status for pod" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" pod="openshift-marketplace/certified-operators-4kppq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4kppq\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.786795 4792 status_manager.go:851] "Failed to get status for pod" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" pod="openshift-marketplace/redhat-marketplace-l85mb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-l85mb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.787926 4792 status_manager.go:851] "Failed to get status for pod" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" pod="openshift-marketplace/redhat-operators-d4jqh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d4jqh\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.788328 4792 status_manager.go:851] "Failed to get status for pod" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.788782 4792 status_manager.go:851] "Failed to get status for pod" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" pod="openshift-marketplace/community-operators-c575g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-c575g\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.789094 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.789323 4792 status_manager.go:851] "Failed to get status 
for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.789465 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.789695 4792 status_manager.go:851] "Failed to get status for pod" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" pod="openshift-marketplace/redhat-marketplace-kk56x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kk56x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.789871 4792 status_manager.go:851] "Failed to get status for pod" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" pod="openshift-marketplace/community-operators-5bd5w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-5bd5w\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.790025 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.790469 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kk56x" event={"ID":"3ee97d71-6a45-4d65-9242-fe34a6d15f52","Type":"ContainerStarted","Data":"f72d96ce9dcf5bf123676a4c6b3732f5f6477f12d10ae682c5d68460d4f7444c"} Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.790700 4792 status_manager.go:851] "Failed to get status for pod" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" pod="openshift-marketplace/redhat-operators-2rld8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2rld8\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.790979 4792 status_manager.go:851] "Failed to get status for pod" podUID="cc09a117-7791-479f-8477-46cd94afcc09" pod="openshift-marketplace/certified-operators-drcjx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-drcjx\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.791168 4792 status_manager.go:851] "Failed to get status for pod" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" pod="openshift-marketplace/certified-operators-4kppq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4kppq\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.791532 4792 status_manager.go:851] "Failed to get status for pod" 
podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" pod="openshift-marketplace/redhat-marketplace-l85mb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-l85mb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.792015 4792 status_manager.go:851] "Failed to get status for pod" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.792194 4792 status_manager.go:851] "Failed to get status for pod" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" pod="openshift-marketplace/redhat-operators-d4jqh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d4jqh\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.792354 4792 status_manager.go:851] "Failed to get status for pod" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" pod="openshift-marketplace/community-operators-c575g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-c575g\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.792501 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.792648 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.792809 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.792993 4792 status_manager.go:851] "Failed to get status for pod" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" pod="openshift-marketplace/redhat-marketplace-kk56x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kk56x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.793140 4792 status_manager.go:851] "Failed to get status for pod" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" pod="openshift-marketplace/community-operators-5bd5w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-5bd5w\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:08 crc kubenswrapper[4792]: I0121 18:02:08.793275 4792 
status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.799054 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l85mb" event={"ID":"e6f739f0-719a-4454-bbaf-b4b5c624f084","Type":"ContainerStarted","Data":"422782ed0f6c84054d8de9616394a89530aa8eb556a2135c8e4e3ba7bdccd475"} Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.800616 4792 status_manager.go:851] "Failed to get status for pod" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" pod="openshift-marketplace/community-operators-c575g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-c575g\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.800790 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.800981 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.801190 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.801347 4792 status_manager.go:851] "Failed to get status for pod" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" pod="openshift-marketplace/redhat-marketplace-kk56x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kk56x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.801531 4792 status_manager.go:851] "Failed to get status for pod" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" pod="openshift-marketplace/community-operators-5bd5w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-5bd5w\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.801700 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.801942 4792 
status_manager.go:851] "Failed to get status for pod" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" pod="openshift-marketplace/redhat-operators-2rld8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2rld8\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.802116 4792 status_manager.go:851] "Failed to get status for pod" podUID="cc09a117-7791-479f-8477-46cd94afcc09" pod="openshift-marketplace/certified-operators-drcjx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-drcjx\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.802278 4792 status_manager.go:851] "Failed to get status for pod" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" pod="openshift-marketplace/certified-operators-4kppq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4kppq\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.802439 4792 status_manager.go:851] "Failed to get status for pod" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" pod="openshift-marketplace/redhat-marketplace-l85mb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-l85mb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.802607 4792 status_manager.go:851] "Failed to get status for pod" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.802798 4792 status_manager.go:851] "Failed to get status for pod" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" pod="openshift-marketplace/redhat-operators-d4jqh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d4jqh\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.805732 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4kppq" event={"ID":"bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd","Type":"ContainerStarted","Data":"cd8f3aaef77ed4b850c80cc49480c0b2dc10447b8a8a33768ef7e10fa9b386c4"} Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.806828 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.807042 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.807458 4792 status_manager.go:851] "Failed to get 
status for pod" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" pod="openshift-marketplace/redhat-marketplace-kk56x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kk56x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.808242 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.808464 4792 status_manager.go:851] "Failed to get status for pod" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" pod="openshift-marketplace/community-operators-5bd5w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-5bd5w\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.808887 4792 status_manager.go:851] "Failed to get status for pod" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" pod="openshift-marketplace/redhat-operators-2rld8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2rld8\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.809096 4792 status_manager.go:851] "Failed to get status for pod" podUID="cc09a117-7791-479f-8477-46cd94afcc09" pod="openshift-marketplace/certified-operators-drcjx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-drcjx\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.809292 4792 status_manager.go:851] "Failed to get status for pod" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" pod="openshift-marketplace/certified-operators-4kppq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4kppq\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.809504 4792 status_manager.go:851] "Failed to get status for pod" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" pod="openshift-marketplace/redhat-marketplace-l85mb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-l85mb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.809715 4792 status_manager.go:851] "Failed to get status for pod" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.809946 4792 status_manager.go:851] "Failed to get status for pod" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" pod="openshift-marketplace/redhat-operators-d4jqh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d4jqh\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 
18:02:09.810153 4792 status_manager.go:851] "Failed to get status for pod" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" pod="openshift-marketplace/community-operators-c575g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-c575g\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: I0121 18:02:09.810364 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: E0121 18:02:09.831031 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T18:02:09Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T18:02:09Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T18:02:09Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T18:02:09Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:020b5bee2bbd09fbf64a1af808628bb76e9c70b9efdc49f38e5a50641590514c\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:78f8ee56f09c047b3acd7e5b6b8a0f9534952f418b658c9f5a6d45d12546e67c\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1670570239},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[],\\\"sizeBytes\\\":1202744046},{\\\"names\\\":[],\\\"sizeBytes\\\":1178956511},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:c10fecd0ba9b4f4f77af571afe82506201ee1139d1904e61b94987e47659a271\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:c44546b94a5203c84127195a969fe508a3c8e632c14d08b60a6cc3f15d19cc0d\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1167523055},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\"],\\\"s
izeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-
apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@s
ha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: E0121 18:02:09.831377 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: E0121 18:02:09.831777 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: E0121 18:02:09.832312 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: E0121 18:02:09.832604 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:09 crc kubenswrapper[4792]: E0121 18:02:09.832638 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.249081 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.252931 4792 status_manager.go:851] "Failed to get status for pod" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" pod="openshift-marketplace/redhat-operators-2rld8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2rld8\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.253804 4792 status_manager.go:851] "Failed to get status for pod" podUID="cc09a117-7791-479f-8477-46cd94afcc09" pod="openshift-marketplace/certified-operators-drcjx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-drcjx\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.254565 4792 status_manager.go:851] "Failed to get status for pod" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" pod="openshift-marketplace/certified-operators-4kppq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4kppq\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.254978 4792 status_manager.go:851] "Failed to get status for pod" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" pod="openshift-marketplace/redhat-marketplace-l85mb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-l85mb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.255351 4792 status_manager.go:851] "Failed to get status for pod" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.255943 4792 status_manager.go:851] "Failed to get status for pod" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" pod="openshift-marketplace/redhat-operators-d4jqh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d4jqh\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.256144 4792 status_manager.go:851] "Failed to get status for pod" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" pod="openshift-marketplace/community-operators-c575g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-c575g\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.256380 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.258184 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.258462 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.258718 4792 status_manager.go:851] "Failed to get status for pod" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" pod="openshift-marketplace/redhat-marketplace-kk56x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kk56x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.259062 4792 status_manager.go:851] "Failed to get status for pod" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" pod="openshift-marketplace/community-operators-5bd5w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-5bd5w\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.259276 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.265898 4792 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f6f746df-ab9c-430c-980c-241744c37dec" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.265932 4792 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f6f746df-ab9c-430c-980c-241744c37dec" Jan 21 18:02:12 crc kubenswrapper[4792]: E0121 18:02:12.266625 4792 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.267303 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 18:02:12 crc kubenswrapper[4792]: W0121 18:02:12.308747 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-9c25b4e55f2203117b2dc19a40685f6ed2525d4c7efa2250300ef28f0ad241fc WatchSource:0}: Error finding container 9c25b4e55f2203117b2dc19a40685f6ed2525d4c7efa2250300ef28f0ad241fc: Status 404 returned error can't find the container with id 9c25b4e55f2203117b2dc19a40685f6ed2525d4c7efa2250300ef28f0ad241fc Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.831490 4792 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="0d04c4c5017ee46184cc875a56acc459cc63ca6c2edeaf75c4910fd23cac7467" exitCode=0 Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.831715 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"0d04c4c5017ee46184cc875a56acc459cc63ca6c2edeaf75c4910fd23cac7467"} Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.831953 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"9c25b4e55f2203117b2dc19a40685f6ed2525d4c7efa2250300ef28f0ad241fc"} Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.832301 4792 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f6f746df-ab9c-430c-980c-241744c37dec" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.832319 4792 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f6f746df-ab9c-430c-980c-241744c37dec" Jan 21 18:02:12 crc kubenswrapper[4792]: E0121 18:02:12.832836 4792 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.833525 4792 status_manager.go:851] "Failed to get status for pod" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" pod="openshift-marketplace/redhat-marketplace-kk56x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kk56x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.834008 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.834568 4792 status_manager.go:851] "Failed to get status for pod" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" pod="openshift-marketplace/community-operators-5bd5w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-5bd5w\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.834975 4792 status_manager.go:851] 
"Failed to get status for pod" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" pod="openshift-marketplace/redhat-operators-2rld8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2rld8\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.835664 4792 status_manager.go:851] "Failed to get status for pod" podUID="cc09a117-7791-479f-8477-46cd94afcc09" pod="openshift-marketplace/certified-operators-drcjx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-drcjx\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.836699 4792 status_manager.go:851] "Failed to get status for pod" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" pod="openshift-marketplace/certified-operators-4kppq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4kppq\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.837120 4792 status_manager.go:851] "Failed to get status for pod" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" pod="openshift-marketplace/redhat-marketplace-l85mb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-l85mb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.837499 4792 status_manager.go:851] "Failed to get status for pod" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.839052 4792 status_manager.go:851] "Failed to get status for pod" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" pod="openshift-marketplace/redhat-operators-d4jqh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d4jqh\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.839436 4792 status_manager.go:851] "Failed to get status for pod" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" pod="openshift-marketplace/community-operators-c575g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-c575g\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.839995 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.840347 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 
18:02:12.840659 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.888086 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4kppq" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.888494 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4kppq" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.941928 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4kppq" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.942747 4792 status_manager.go:851] "Failed to get status for pod" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" pod="openshift-marketplace/community-operators-5bd5w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-5bd5w\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.943518 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.944045 4792 status_manager.go:851] "Failed to get status for pod" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" pod="openshift-marketplace/redhat-operators-2rld8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2rld8\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.944268 4792 status_manager.go:851] "Failed to get status for pod" podUID="cc09a117-7791-479f-8477-46cd94afcc09" pod="openshift-marketplace/certified-operators-drcjx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-drcjx\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.944468 4792 status_manager.go:851] "Failed to get status for pod" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" pod="openshift-marketplace/certified-operators-4kppq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4kppq\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.944678 4792 status_manager.go:851] "Failed to get status for pod" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" pod="openshift-marketplace/redhat-marketplace-l85mb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-l85mb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.944900 4792 status_manager.go:851] "Failed to get status for pod" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.945137 4792 status_manager.go:851] "Failed to get status for pod" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" pod="openshift-marketplace/redhat-operators-d4jqh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d4jqh\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.945461 4792 status_manager.go:851] "Failed to get status for pod" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" pod="openshift-marketplace/community-operators-c575g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-c575g\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.946027 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.946401 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.946664 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:12 crc kubenswrapper[4792]: I0121 18:02:12.946951 4792 status_manager.go:851] "Failed to get status for pod" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" pod="openshift-marketplace/redhat-marketplace-kk56x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kk56x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.092910 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-drcjx" Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.092994 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-drcjx" Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.139531 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-drcjx" Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.141360 4792 status_manager.go:851] "Failed to get status for pod" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" pod="openshift-marketplace/redhat-marketplace-kk56x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kk56x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:13 crc 
kubenswrapper[4792]: I0121 18:02:13.141915 4792 status_manager.go:851] "Failed to get status for pod" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" pod="openshift-marketplace/community-operators-5bd5w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-5bd5w\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.142312 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.142677 4792 status_manager.go:851] "Failed to get status for pod" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" pod="openshift-marketplace/redhat-operators-2rld8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2rld8\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.142942 4792 status_manager.go:851] "Failed to get status for pod" podUID="cc09a117-7791-479f-8477-46cd94afcc09" pod="openshift-marketplace/certified-operators-drcjx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-drcjx\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.143223 4792 status_manager.go:851] "Failed to get status for pod" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" pod="openshift-marketplace/certified-operators-4kppq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4kppq\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.143578 4792 status_manager.go:851] "Failed to get status for pod" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" pod="openshift-marketplace/redhat-marketplace-l85mb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-l85mb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.143886 4792 status_manager.go:851] "Failed to get status for pod" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-node-identity/pods/network-node-identity-vrzqb\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.144149 4792 status_manager.go:851] "Failed to get status for pod" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" pod="openshift-marketplace/redhat-operators-d4jqh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d4jqh\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.144447 4792 status_manager.go:851] "Failed to get status for pod" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" pod="openshift-marketplace/community-operators-c575g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-c575g\": dial tcp 
38.129.56.68:6443: connect: connection refused" Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.144665 4792 status_manager.go:851] "Failed to get status for pod" podUID="1001cdf3-8e91-4e17-a8f7-a92b91daf23e" pod="openshift-console/downloads-7954f5f757-xpn4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/pods/downloads-7954f5f757-xpn4x\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.144917 4792 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.145179 4792 status_manager.go:851] "Failed to get status for pod" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.68:6443: connect: connection refused" Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.502960 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-c575g" Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.503061 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-c575g" Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.752972 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5bd5w" Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.753520 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5bd5w" Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.810110 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5bd5w" Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.841545 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"4dd489e3f8fa232ea9d330547ccfa71bd6cc5ff844d19496d4533b76f2c9de54"} Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.841616 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"4b19a72aa75db398b8ca8432c0c41d4ac7915866dc5219d1d23179ec04e3005a"} Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.893502 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-drcjx" Jan 21 18:02:13 crc kubenswrapper[4792]: I0121 18:02:13.901985 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5bd5w" Jan 21 18:02:14 crc kubenswrapper[4792]: I0121 18:02:14.135544 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-l85mb" Jan 21 18:02:14 crc kubenswrapper[4792]: I0121 18:02:14.135605 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-marketplace-l85mb" Jan 21 18:02:14 crc kubenswrapper[4792]: I0121 18:02:14.205328 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-l85mb" Jan 21 18:02:14 crc kubenswrapper[4792]: I0121 18:02:14.279480 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-c575g" Jan 21 18:02:14 crc kubenswrapper[4792]: I0121 18:02:14.327771 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-c575g" Jan 21 18:02:14 crc kubenswrapper[4792]: I0121 18:02:14.626139 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kk56x" Jan 21 18:02:14 crc kubenswrapper[4792]: I0121 18:02:14.626195 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kk56x" Jan 21 18:02:14 crc kubenswrapper[4792]: I0121 18:02:14.889630 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kk56x" Jan 21 18:02:14 crc kubenswrapper[4792]: I0121 18:02:14.907629 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-l85mb" Jan 21 18:02:14 crc kubenswrapper[4792]: I0121 18:02:14.970157 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kk56x" Jan 21 18:02:15 crc kubenswrapper[4792]: I0121 18:02:15.590649 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-d4jqh" Jan 21 18:02:16 crc kubenswrapper[4792]: I0121 18:02:16.005284 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-d4jqh" Jan 21 18:02:16 crc kubenswrapper[4792]: I0121 18:02:16.258622 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2rld8" Jan 21 18:02:16 crc kubenswrapper[4792]: I0121 18:02:16.314973 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2rld8" Jan 21 18:02:16 crc kubenswrapper[4792]: I0121 18:02:16.992133 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b7583f1c4fac12ad6a24a8d1bd24d0ffef5e735dd9c29a73d8390cabece6b579"} Jan 21 18:02:17 crc kubenswrapper[4792]: I0121 18:02:17.295029 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 18:02:19 crc kubenswrapper[4792]: I0121 18:02:19.138567 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"13ad5496824ddb752ed86bb72a3a5c61187bd399a7a659e46c55f0496ff37720"} Jan 21 18:02:20 crc kubenswrapper[4792]: I0121 18:02:20.153262 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"25543b3c1b107724e0534d55faf75adac399c9a99bd4aa50fcd38d36cb250895"} Jan 21 18:02:20 crc kubenswrapper[4792]: I0121 18:02:20.155538 4792 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 18:02:20 crc kubenswrapper[4792]: I0121 18:02:20.153656 4792 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f6f746df-ab9c-430c-980c-241744c37dec" Jan 21 18:02:20 crc kubenswrapper[4792]: I0121 18:02:20.155752 4792 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f6f746df-ab9c-430c-980c-241744c37dec" Jan 21 18:02:20 crc kubenswrapper[4792]: I0121 18:02:20.164357 4792 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 18:02:21 crc kubenswrapper[4792]: I0121 18:02:21.159958 4792 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f6f746df-ab9c-430c-980c-241744c37dec" Jan 21 18:02:21 crc kubenswrapper[4792]: I0121 18:02:21.160265 4792 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f6f746df-ab9c-430c-980c-241744c37dec" Jan 21 18:02:22 crc kubenswrapper[4792]: I0121 18:02:22.148757 4792 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="75635792-6044-4a8a-a6e9-0d2538f64f75" Jan 21 18:02:22 crc kubenswrapper[4792]: I0121 18:02:22.942790 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4kppq" Jan 21 18:02:28 crc kubenswrapper[4792]: I0121 18:02:28.219753 4792 generic.go:334] "Generic (PLEG): container finished" podID="a15d3491-6301-49f2-a196-df5db956aa82" containerID="89176767e2d7ab24fa9279370842d98d235f15b1682693406c811d7fa8abb730" exitCode=0 Jan 21 18:02:28 crc kubenswrapper[4792]: I0121 18:02:28.220941 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" event={"ID":"a15d3491-6301-49f2-a196-df5db956aa82","Type":"ContainerDied","Data":"89176767e2d7ab24fa9279370842d98d235f15b1682693406c811d7fa8abb730"} Jan 21 18:02:28 crc kubenswrapper[4792]: I0121 18:02:28.221463 4792 scope.go:117] "RemoveContainer" containerID="89176767e2d7ab24fa9279370842d98d235f15b1682693406c811d7fa8abb730" Jan 21 18:02:28 crc kubenswrapper[4792]: I0121 18:02:28.843213 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" Jan 21 18:02:28 crc kubenswrapper[4792]: I0121 18:02:28.843774 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" Jan 21 18:02:29 crc kubenswrapper[4792]: I0121 18:02:29.228444 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-bdt7b_a15d3491-6301-49f2-a196-df5db956aa82/marketplace-operator/1.log" Jan 21 18:02:29 crc kubenswrapper[4792]: I0121 18:02:29.228993 4792 generic.go:334] "Generic (PLEG): container finished" podID="a15d3491-6301-49f2-a196-df5db956aa82" containerID="36039983dec12cefdcb8c4078db666155fc9de44cb9fb66ebb04686c526f2401" exitCode=1 Jan 21 18:02:29 crc kubenswrapper[4792]: I0121 18:02:29.229030 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" 
event={"ID":"a15d3491-6301-49f2-a196-df5db956aa82","Type":"ContainerDied","Data":"36039983dec12cefdcb8c4078db666155fc9de44cb9fb66ebb04686c526f2401"} Jan 21 18:02:29 crc kubenswrapper[4792]: I0121 18:02:29.229071 4792 scope.go:117] "RemoveContainer" containerID="89176767e2d7ab24fa9279370842d98d235f15b1682693406c811d7fa8abb730" Jan 21 18:02:29 crc kubenswrapper[4792]: I0121 18:02:29.229561 4792 scope.go:117] "RemoveContainer" containerID="36039983dec12cefdcb8c4078db666155fc9de44cb9fb66ebb04686c526f2401" Jan 21 18:02:29 crc kubenswrapper[4792]: E0121 18:02:29.229808 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-bdt7b_openshift-marketplace(a15d3491-6301-49f2-a196-df5db956aa82)\"" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" podUID="a15d3491-6301-49f2-a196-df5db956aa82" Jan 21 18:02:30 crc kubenswrapper[4792]: I0121 18:02:30.236404 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-bdt7b_a15d3491-6301-49f2-a196-df5db956aa82/marketplace-operator/1.log" Jan 21 18:02:30 crc kubenswrapper[4792]: I0121 18:02:30.237294 4792 scope.go:117] "RemoveContainer" containerID="36039983dec12cefdcb8c4078db666155fc9de44cb9fb66ebb04686c526f2401" Jan 21 18:02:30 crc kubenswrapper[4792]: E0121 18:02:30.237490 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-bdt7b_openshift-marketplace(a15d3491-6301-49f2-a196-df5db956aa82)\"" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" podUID="a15d3491-6301-49f2-a196-df5db956aa82" Jan 21 18:02:38 crc kubenswrapper[4792]: I0121 18:02:38.843042 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" Jan 21 18:02:38 crc kubenswrapper[4792]: I0121 18:02:38.843430 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" Jan 21 18:02:38 crc kubenswrapper[4792]: I0121 18:02:38.844197 4792 scope.go:117] "RemoveContainer" containerID="36039983dec12cefdcb8c4078db666155fc9de44cb9fb66ebb04686c526f2401" Jan 21 18:02:39 crc kubenswrapper[4792]: I0121 18:02:39.303220 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-bdt7b_a15d3491-6301-49f2-a196-df5db956aa82/marketplace-operator/2.log" Jan 21 18:02:39 crc kubenswrapper[4792]: I0121 18:02:39.304308 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-bdt7b_a15d3491-6301-49f2-a196-df5db956aa82/marketplace-operator/1.log" Jan 21 18:02:39 crc kubenswrapper[4792]: I0121 18:02:39.304667 4792 generic.go:334] "Generic (PLEG): container finished" podID="a15d3491-6301-49f2-a196-df5db956aa82" containerID="52b83897d96e792544c8ec61bb60fc8dbb3bd5b4d69dee75d4c827d0353654c9" exitCode=1 Jan 21 18:02:39 crc kubenswrapper[4792]: I0121 18:02:39.304766 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" 
event={"ID":"a15d3491-6301-49f2-a196-df5db956aa82","Type":"ContainerDied","Data":"52b83897d96e792544c8ec61bb60fc8dbb3bd5b4d69dee75d4c827d0353654c9"} Jan 21 18:02:39 crc kubenswrapper[4792]: I0121 18:02:39.304836 4792 scope.go:117] "RemoveContainer" containerID="36039983dec12cefdcb8c4078db666155fc9de44cb9fb66ebb04686c526f2401" Jan 21 18:02:39 crc kubenswrapper[4792]: I0121 18:02:39.305604 4792 scope.go:117] "RemoveContainer" containerID="52b83897d96e792544c8ec61bb60fc8dbb3bd5b4d69dee75d4c827d0353654c9" Jan 21 18:02:39 crc kubenswrapper[4792]: E0121 18:02:39.305994 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 20s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-bdt7b_openshift-marketplace(a15d3491-6301-49f2-a196-df5db956aa82)\"" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" podUID="a15d3491-6301-49f2-a196-df5db956aa82" Jan 21 18:02:40 crc kubenswrapper[4792]: I0121 18:02:40.385574 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-bdt7b_a15d3491-6301-49f2-a196-df5db956aa82/marketplace-operator/2.log" Jan 21 18:02:48 crc kubenswrapper[4792]: I0121 18:02:48.843235 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" Jan 21 18:02:48 crc kubenswrapper[4792]: I0121 18:02:48.843877 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" Jan 21 18:02:48 crc kubenswrapper[4792]: I0121 18:02:48.844542 4792 scope.go:117] "RemoveContainer" containerID="52b83897d96e792544c8ec61bb60fc8dbb3bd5b4d69dee75d4c827d0353654c9" Jan 21 18:02:48 crc kubenswrapper[4792]: E0121 18:02:48.844756 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 20s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-bdt7b_openshift-marketplace(a15d3491-6301-49f2-a196-df5db956aa82)\"" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" podUID="a15d3491-6301-49f2-a196-df5db956aa82" Jan 21 18:02:51 crc kubenswrapper[4792]: I0121 18:02:51.418583 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 21 18:02:52 crc kubenswrapper[4792]: I0121 18:02:52.812111 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 21 18:02:53 crc kubenswrapper[4792]: I0121 18:02:53.002058 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 21 18:02:53 crc kubenswrapper[4792]: I0121 18:02:53.471578 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 21 18:02:53 crc kubenswrapper[4792]: I0121 18:02:53.570793 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:02:53 crc kubenswrapper[4792]: I0121 18:02:53.570913 4792 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:02:54 crc kubenswrapper[4792]: I0121 18:02:54.221379 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 21 18:02:54 crc kubenswrapper[4792]: I0121 18:02:54.760992 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 21 18:02:54 crc kubenswrapper[4792]: I0121 18:02:54.812841 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 21 18:02:55 crc kubenswrapper[4792]: I0121 18:02:55.802089 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 21 18:02:55 crc kubenswrapper[4792]: I0121 18:02:55.927354 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 21 18:02:55 crc kubenswrapper[4792]: I0121 18:02:55.930557 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 21 18:02:56 crc kubenswrapper[4792]: I0121 18:02:56.435228 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 21 18:02:56 crc kubenswrapper[4792]: I0121 18:02:56.451584 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 21 18:02:56 crc kubenswrapper[4792]: I0121 18:02:56.901437 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 21 18:02:56 crc kubenswrapper[4792]: I0121 18:02:56.971740 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 21 18:02:57 crc kubenswrapper[4792]: I0121 18:02:57.275893 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 21 18:02:57 crc kubenswrapper[4792]: I0121 18:02:57.347873 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 21 18:02:57 crc kubenswrapper[4792]: I0121 18:02:57.452862 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 21 18:02:57 crc kubenswrapper[4792]: I0121 18:02:57.464405 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 21 18:02:58 crc kubenswrapper[4792]: I0121 18:02:58.060926 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 21 18:02:58 crc kubenswrapper[4792]: I0121 18:02:58.065981 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 21 18:02:58 crc kubenswrapper[4792]: I0121 18:02:58.497021 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 21 18:02:59 crc kubenswrapper[4792]: 
I0121 18:02:59.027822 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 21 18:02:59 crc kubenswrapper[4792]: I0121 18:02:59.959415 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 21 18:03:00 crc kubenswrapper[4792]: I0121 18:03:00.196741 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 21 18:03:00 crc kubenswrapper[4792]: I0121 18:03:00.394172 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 21 18:03:00 crc kubenswrapper[4792]: I0121 18:03:00.611834 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 21 18:03:00 crc kubenswrapper[4792]: I0121 18:03:00.836070 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 21 18:03:00 crc kubenswrapper[4792]: I0121 18:03:00.841656 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 21 18:03:01 crc kubenswrapper[4792]: I0121 18:03:01.170933 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 21 18:03:01 crc kubenswrapper[4792]: I0121 18:03:01.380512 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 21 18:03:01 crc kubenswrapper[4792]: I0121 18:03:01.954361 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 21 18:03:02 crc kubenswrapper[4792]: I0121 18:03:02.246790 4792 scope.go:117] "RemoveContainer" containerID="52b83897d96e792544c8ec61bb60fc8dbb3bd5b4d69dee75d4c827d0353654c9" Jan 21 18:03:02 crc kubenswrapper[4792]: I0121 18:03:02.512175 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-bdt7b_a15d3491-6301-49f2-a196-df5db956aa82/marketplace-operator/2.log" Jan 21 18:03:02 crc kubenswrapper[4792]: I0121 18:03:02.512708 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" event={"ID":"a15d3491-6301-49f2-a196-df5db956aa82","Type":"ContainerStarted","Data":"827aeb26800964cccb2fdd5cd9e5498dd910f8ddadacfcd890a5882ca7bba6e2"} Jan 21 18:03:02 crc kubenswrapper[4792]: I0121 18:03:02.513158 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" Jan 21 18:03:02 crc kubenswrapper[4792]: I0121 18:03:02.514716 4792 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-bdt7b container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" start-of-body= Jan 21 18:03:02 crc kubenswrapper[4792]: I0121 18:03:02.514783 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" podUID="a15d3491-6301-49f2-a196-df5db956aa82" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" Jan 21 
18:03:02 crc kubenswrapper[4792]: I0121 18:03:02.611815 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 21 18:03:02 crc kubenswrapper[4792]: I0121 18:03:02.713528 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 21 18:03:02 crc kubenswrapper[4792]: I0121 18:03:02.879032 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.204783 4792 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.208367 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kk56x" podStartSLOduration=59.381375678 podStartE2EDuration="4m9.208328602s" podCreationTimestamp="2026-01-21 17:58:54 +0000 UTC" firstStartedPulling="2026-01-21 17:58:58.015122975 +0000 UTC m=+171.997086161" lastFinishedPulling="2026-01-21 18:02:07.842075899 +0000 UTC m=+361.824039085" observedRunningTime="2026-01-21 18:02:21.809728099 +0000 UTC m=+375.791691285" watchObservedRunningTime="2026-01-21 18:03:03.208328602 +0000 UTC m=+417.190291788" Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.213533 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4kppq" podStartSLOduration=60.278008255 podStartE2EDuration="4m11.213510033s" podCreationTimestamp="2026-01-21 17:58:52 +0000 UTC" firstStartedPulling="2026-01-21 17:58:58.058150176 +0000 UTC m=+172.040113362" lastFinishedPulling="2026-01-21 18:02:08.993651954 +0000 UTC m=+362.975615140" observedRunningTime="2026-01-21 18:02:21.896149883 +0000 UTC m=+375.878113079" watchObservedRunningTime="2026-01-21 18:03:03.213510033 +0000 UTC m=+417.195473219" Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.214362 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-drcjx" podStartSLOduration=67.536303901 podStartE2EDuration="4m11.214351725s" podCreationTimestamp="2026-01-21 17:58:52 +0000 UTC" firstStartedPulling="2026-01-21 17:58:57.954127299 +0000 UTC m=+171.936090485" lastFinishedPulling="2026-01-21 18:02:01.632175123 +0000 UTC m=+355.614138309" observedRunningTime="2026-01-21 18:02:21.879544796 +0000 UTC m=+375.861507992" watchObservedRunningTime="2026-01-21 18:03:03.214351725 +0000 UTC m=+417.196314911" Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.214532 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2rld8" podStartSLOduration=65.564083979 podStartE2EDuration="4m8.21452277s" podCreationTimestamp="2026-01-21 17:58:55 +0000 UTC" firstStartedPulling="2026-01-21 17:58:59.070629156 +0000 UTC m=+173.052592342" lastFinishedPulling="2026-01-21 18:02:01.721067947 +0000 UTC m=+355.703031133" observedRunningTime="2026-01-21 18:02:21.862959579 +0000 UTC m=+375.844922765" watchObservedRunningTime="2026-01-21 18:03:03.21452277 +0000 UTC m=+417.196485956" Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.215765 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5bd5w" podStartSLOduration=62.000428229 podStartE2EDuration="4m11.215726802s" podCreationTimestamp="2026-01-21 17:58:52 +0000 UTC" 
firstStartedPulling="2026-01-21 17:58:57.973612292 +0000 UTC m=+171.955575478" lastFinishedPulling="2026-01-21 18:02:07.188910865 +0000 UTC m=+361.170874051" observedRunningTime="2026-01-21 18:02:21.828450276 +0000 UTC m=+375.810413462" watchObservedRunningTime="2026-01-21 18:03:03.215726802 +0000 UTC m=+417.197689988" Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.216274 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-l85mb" podStartSLOduration=59.784483295 podStartE2EDuration="4m10.216155075s" podCreationTimestamp="2026-01-21 17:58:53 +0000 UTC" firstStartedPulling="2026-01-21 17:58:57.947987095 +0000 UTC m=+171.929950281" lastFinishedPulling="2026-01-21 18:02:08.379658875 +0000 UTC m=+362.361622061" observedRunningTime="2026-01-21 18:02:21.914619414 +0000 UTC m=+375.896582600" watchObservedRunningTime="2026-01-21 18:03:03.216155075 +0000 UTC m=+417.198118261" Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.217586 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=114.217574213 podStartE2EDuration="1m54.217574213s" podCreationTimestamp="2026-01-21 18:01:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:02:21.768388554 +0000 UTC m=+375.750351740" watchObservedRunningTime="2026-01-21 18:03:03.217574213 +0000 UTC m=+417.199537399" Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.217993 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-c575g" podStartSLOduration=63.401485304 podStartE2EDuration="4m12.217986354s" podCreationTimestamp="2026-01-21 17:58:51 +0000 UTC" firstStartedPulling="2026-01-21 17:58:58.023063353 +0000 UTC m=+172.005026539" lastFinishedPulling="2026-01-21 18:02:06.839564403 +0000 UTC m=+360.821527589" observedRunningTime="2026-01-21 18:02:21.732629087 +0000 UTC m=+375.714592293" watchObservedRunningTime="2026-01-21 18:03:03.217986354 +0000 UTC m=+417.199949540" Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.219322 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-d4jqh" podStartSLOduration=65.419208773 podStartE2EDuration="4m9.21931565s" podCreationTimestamp="2026-01-21 17:58:54 +0000 UTC" firstStartedPulling="2026-01-21 17:58:57.942643882 +0000 UTC m=+171.924607058" lastFinishedPulling="2026-01-21 18:02:01.742750749 +0000 UTC m=+355.724713935" observedRunningTime="2026-01-21 18:02:21.945264617 +0000 UTC m=+375.927227803" watchObservedRunningTime="2026-01-21 18:03:03.21931565 +0000 UTC m=+417.201278836" Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.220140 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.220289 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.221216 4792 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f6f746df-ab9c-430c-980c-241744c37dec" Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.221289 4792 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
podUID="f6f746df-ab9c-430c-980c-241744c37dec" Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.226175 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.243494 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=43.243474356 podStartE2EDuration="43.243474356s" podCreationTimestamp="2026-01-21 18:02:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:03:03.24323001 +0000 UTC m=+417.225193196" watchObservedRunningTime="2026-01-21 18:03:03.243474356 +0000 UTC m=+417.225437542" Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.493444 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.521640 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-bdt7b_a15d3491-6301-49f2-a196-df5db956aa82/marketplace-operator/3.log" Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.522429 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-bdt7b_a15d3491-6301-49f2-a196-df5db956aa82/marketplace-operator/2.log" Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.522577 4792 generic.go:334] "Generic (PLEG): container finished" podID="a15d3491-6301-49f2-a196-df5db956aa82" containerID="827aeb26800964cccb2fdd5cd9e5498dd910f8ddadacfcd890a5882ca7bba6e2" exitCode=1 Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.522664 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" event={"ID":"a15d3491-6301-49f2-a196-df5db956aa82","Type":"ContainerDied","Data":"827aeb26800964cccb2fdd5cd9e5498dd910f8ddadacfcd890a5882ca7bba6e2"} Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.522775 4792 scope.go:117] "RemoveContainer" containerID="52b83897d96e792544c8ec61bb60fc8dbb3bd5b4d69dee75d4c827d0353654c9" Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.523413 4792 scope.go:117] "RemoveContainer" containerID="827aeb26800964cccb2fdd5cd9e5498dd910f8ddadacfcd890a5882ca7bba6e2" Jan 21 18:03:03 crc kubenswrapper[4792]: E0121 18:03:03.523693 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 40s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-bdt7b_openshift-marketplace(a15d3491-6301-49f2-a196-df5db956aa82)\"" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" podUID="a15d3491-6301-49f2-a196-df5db956aa82" Jan 21 18:03:03 crc kubenswrapper[4792]: I0121 18:03:03.802655 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 21 18:03:04 crc kubenswrapper[4792]: I0121 18:03:04.336985 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 21 18:03:04 crc kubenswrapper[4792]: I0121 18:03:04.438255 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 21 18:03:04 crc kubenswrapper[4792]: I0121 18:03:04.532703 4792 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-bdt7b_a15d3491-6301-49f2-a196-df5db956aa82/marketplace-operator/3.log" Jan 21 18:03:04 crc kubenswrapper[4792]: I0121 18:03:04.533795 4792 scope.go:117] "RemoveContainer" containerID="827aeb26800964cccb2fdd5cd9e5498dd910f8ddadacfcd890a5882ca7bba6e2" Jan 21 18:03:04 crc kubenswrapper[4792]: E0121 18:03:04.534085 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 40s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-bdt7b_openshift-marketplace(a15d3491-6301-49f2-a196-df5db956aa82)\"" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" podUID="a15d3491-6301-49f2-a196-df5db956aa82" Jan 21 18:03:04 crc kubenswrapper[4792]: I0121 18:03:04.536660 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 21 18:03:04 crc kubenswrapper[4792]: I0121 18:03:04.595838 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 21 18:03:04 crc kubenswrapper[4792]: I0121 18:03:04.702214 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 21 18:03:04 crc kubenswrapper[4792]: I0121 18:03:04.848511 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 21 18:03:05 crc kubenswrapper[4792]: I0121 18:03:05.022257 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 21 18:03:05 crc kubenswrapper[4792]: I0121 18:03:05.051956 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 21 18:03:05 crc kubenswrapper[4792]: I0121 18:03:05.171452 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 21 18:03:05 crc kubenswrapper[4792]: I0121 18:03:05.210445 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 21 18:03:05 crc kubenswrapper[4792]: I0121 18:03:05.397065 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 21 18:03:05 crc kubenswrapper[4792]: I0121 18:03:05.498158 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 21 18:03:05 crc kubenswrapper[4792]: I0121 18:03:05.590377 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 21 18:03:05 crc kubenswrapper[4792]: I0121 18:03:05.758176 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 21 18:03:05 crc kubenswrapper[4792]: I0121 18:03:05.927592 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 21 18:03:06 crc kubenswrapper[4792]: I0121 18:03:06.370491 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 21 18:03:06 crc kubenswrapper[4792]: I0121 18:03:06.459294 4792 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 21 18:03:06 crc kubenswrapper[4792]: I0121 18:03:06.504051 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 21 18:03:06 crc kubenswrapper[4792]: I0121 18:03:06.610952 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 21 18:03:06 crc kubenswrapper[4792]: I0121 18:03:06.625652 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 21 18:03:06 crc kubenswrapper[4792]: I0121 18:03:06.650690 4792 scope.go:117] "RemoveContainer" containerID="64a26bb9caca3c371707c814f53479521ca9b9cb77985db0dc2956bd57170c63" Jan 21 18:03:06 crc kubenswrapper[4792]: I0121 18:03:06.754440 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 21 18:03:06 crc kubenswrapper[4792]: I0121 18:03:06.795093 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 21 18:03:06 crc kubenswrapper[4792]: I0121 18:03:06.802054 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 21 18:03:06 crc kubenswrapper[4792]: I0121 18:03:06.892912 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 21 18:03:06 crc kubenswrapper[4792]: I0121 18:03:06.927430 4792 scope.go:117] "RemoveContainer" containerID="9efb8824ba69c49e1e83efcf4078c8febcae53767de2687e4e0c91776bbffca5" Jan 21 18:03:06 crc kubenswrapper[4792]: I0121 18:03:06.947438 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 21 18:03:06 crc kubenswrapper[4792]: I0121 18:03:06.952167 4792 scope.go:117] "RemoveContainer" containerID="1b68008a4fe6cad876b8bf8b55a6500261bf96e2dbeb4deac4f98b5fc96f57fc" Jan 21 18:03:06 crc kubenswrapper[4792]: I0121 18:03:06.974676 4792 scope.go:117] "RemoveContainer" containerID="ca868de86880247b3440515fe2d836217514c1153abab9e11b3a197a75dc2462" Jan 21 18:03:07 crc kubenswrapper[4792]: I0121 18:03:07.003867 4792 scope.go:117] "RemoveContainer" containerID="c373d5f8788382f104e8e651b7f73d3a39a0e6943a64b8cb2b4995fa7b96e4c8" Jan 21 18:03:07 crc kubenswrapper[4792]: I0121 18:03:07.063839 4792 scope.go:117] "RemoveContainer" containerID="55eaae5bc76088c59b7e3ed06f6e45a8e4e3eb9ecc5b56731ec0e329ae7ed9f5" Jan 21 18:03:07 crc kubenswrapper[4792]: I0121 18:03:07.267498 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 18:03:07 crc kubenswrapper[4792]: I0121 18:03:07.267591 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 18:03:07 crc kubenswrapper[4792]: I0121 18:03:07.277088 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 18:03:07 crc kubenswrapper[4792]: I0121 18:03:07.282120 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 21 18:03:07 crc kubenswrapper[4792]: I0121 18:03:07.477905 4792 patch_prober.go:28] interesting 
pod/controller-manager-879f6c89f-dz4cq container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Jan 21 18:03:07 crc kubenswrapper[4792]: I0121 18:03:07.478025 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" podUID="3afd98a4-e47a-4e22-ab23-0cbf1bf56e46" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Jan 21 18:03:07 crc kubenswrapper[4792]: I0121 18:03:07.478620 4792 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-dz4cq container/controller-manager namespace/openshift-controller-manager: Liveness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Jan 21 18:03:07 crc kubenswrapper[4792]: I0121 18:03:07.478798 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" podUID="3afd98a4-e47a-4e22-ab23-0cbf1bf56e46" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Jan 21 18:03:07 crc kubenswrapper[4792]: I0121 18:03:07.510003 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 21 18:03:07 crc kubenswrapper[4792]: I0121 18:03:07.544823 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 21 18:03:07 crc kubenswrapper[4792]: I0121 18:03:07.555788 4792 generic.go:334] "Generic (PLEG): container finished" podID="3afd98a4-e47a-4e22-ab23-0cbf1bf56e46" containerID="553770c21b346fdab8c4b42c921e8e972ffbf6cf9549dcfb66f2d2d4ad885615" exitCode=0 Jan 21 18:03:07 crc kubenswrapper[4792]: I0121 18:03:07.555887 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" event={"ID":"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46","Type":"ContainerDied","Data":"553770c21b346fdab8c4b42c921e8e972ffbf6cf9549dcfb66f2d2d4ad885615"} Jan 21 18:03:07 crc kubenswrapper[4792]: I0121 18:03:07.556828 4792 scope.go:117] "RemoveContainer" containerID="553770c21b346fdab8c4b42c921e8e972ffbf6cf9549dcfb66f2d2d4ad885615" Jan 21 18:03:07 crc kubenswrapper[4792]: I0121 18:03:07.562162 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 18:03:07 crc kubenswrapper[4792]: I0121 18:03:07.571986 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 21 18:03:07 crc kubenswrapper[4792]: I0121 18:03:07.759591 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 21 18:03:07 crc kubenswrapper[4792]: I0121 18:03:07.833299 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 21 18:03:09 crc kubenswrapper[4792]: I0121 18:03:09.171875 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" Jan 21 18:03:09 crc kubenswrapper[4792]: 
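
The "Probe failed" entries above record the kubelet issuing an HTTP GET against the container's healthz endpoint and treating a refused TCP connection as a failed readiness/liveness probe. A minimal sketch of that check, assuming only what the log shows (the URL is taken from a later marketplace-operator entry; this is illustrative, not the kubelet's prober implementation):

// probecheck.go — minimal sketch of an HTTP health check in the spirit of
// the prober.go entries above; URL and 1s timeout are illustrative.
package main

import (
	"fmt"
	"net/http"
	"time"
)

// probe returns nil only when the endpoint answers with a 2xx/3xx status;
// a failed TCP connect ("connection refused" in the log) surfaces as err.
func probe(url string) error {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return fmt.Errorf("probe failed: %w", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("probe failed: status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probe("http://10.217.0.37:8080/healthz"); err != nil {
		fmt.Println(err)
	}
}

A readiness failure only removes the pod from service endpoints; a liveness failure, as the entries below show, gets the container killed and restarted.
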
Jan 21 18:03:09 crc kubenswrapper[4792]: I0121 18:03:09.172384 4792 scope.go:117] "RemoveContainer" containerID="827aeb26800964cccb2fdd5cd9e5498dd910f8ddadacfcd890a5882ca7bba6e2"
Jan 21 18:03:09 crc kubenswrapper[4792]: E0121 18:03:09.172624 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 40s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-bdt7b_openshift-marketplace(a15d3491-6301-49f2-a196-df5db956aa82)\"" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" podUID="a15d3491-6301-49f2-a196-df5db956aa82"
Jan 21 18:03:09 crc kubenswrapper[4792]: I0121 18:03:09.179422 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Jan 21 18:03:09 crc kubenswrapper[4792]: I0121 18:03:09.179502 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Jan 21 18:03:09 crc kubenswrapper[4792]: I0121 18:03:09.179630 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Jan 21 18:03:09 crc kubenswrapper[4792]: I0121 18:03:09.180300 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Jan 21 18:03:09 crc kubenswrapper[4792]: I0121 18:03:09.182614 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Jan 21 18:03:09 crc kubenswrapper[4792]: I0121 18:03:09.182729 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Jan 21 18:03:09 crc kubenswrapper[4792]: I0121 18:03:09.189877 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Jan 21 18:03:09 crc kubenswrapper[4792]: I0121 18:03:09.226900 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Jan 21 18:03:09 crc kubenswrapper[4792]: I0121 18:03:09.247409 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" event={"ID":"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46","Type":"ContainerStarted","Data":"a52850c529f806319022e0d1f5b8ced234f9c96eff56fbdd68cb1682f8092b27"}
Jan 21 18:03:09 crc kubenswrapper[4792]: I0121 18:03:09.248545 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq"
Jan 21 18:03:09 crc kubenswrapper[4792]: I0121 18:03:09.248641 4792 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-dz4cq container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body=
Jan 21 18:03:09 crc kubenswrapper[4792]: I0121 18:03:09.248678 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" podUID="3afd98a4-e47a-4e22-ab23-0cbf1bf56e46" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused"
Jan 21 18:03:09 crc kubenswrapper[4792]: I0121 18:03:09.459509 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Jan 21 18:03:09 crc kubenswrapper[4792]: I0121 18:03:09.499809 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Jan 21 18:03:09 crc kubenswrapper[4792]: I0121 18:03:09.621815 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Jan 21 18:03:09 crc kubenswrapper[4792]: I0121 18:03:09.648280 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Jan 21 18:03:09 crc kubenswrapper[4792]: I0121 18:03:09.702014 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Jan 21 18:03:09 crc kubenswrapper[4792]: I0121 18:03:09.721822 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Jan 21 18:03:09 crc kubenswrapper[4792]: I0121 18:03:09.837650 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Jan 21 18:03:09 crc kubenswrapper[4792]: I0121 18:03:09.888444 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Jan 21 18:03:09 crc kubenswrapper[4792]: I0121 18:03:09.972063 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Jan 21 18:03:10 crc kubenswrapper[4792]: I0121 18:03:10.093664 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Jan 21 18:03:10 crc kubenswrapper[4792]: I0121 18:03:10.136944 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Jan 21 18:03:10 crc kubenswrapper[4792]: I0121 18:03:10.258489 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq"
Jan 21 18:03:10 crc kubenswrapper[4792]: I0121 18:03:10.447163 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Jan 21 18:03:10 crc kubenswrapper[4792]: I0121 18:03:10.448085 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Jan 21 18:03:11 crc kubenswrapper[4792]: I0121 18:03:11.101451 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Jan 21 18:03:11 crc kubenswrapper[4792]: I0121 18:03:11.415055 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Jan 21 18:03:11 crc kubenswrapper[4792]: I0121 18:03:11.615237 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p"
Jan 21 18:03:11 crc kubenswrapper[4792]: I0121 18:03:11.727168 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Jan 21 18:03:12 crc kubenswrapper[4792]: I0121 18:03:12.144740 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Jan 21 18:03:12 crc kubenswrapper[4792]: I0121 18:03:12.172096 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Jan 21 18:03:12 crc kubenswrapper[4792]: I0121 18:03:12.279899 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Jan 21 18:03:12 crc kubenswrapper[4792]: I0121 18:03:12.464177 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Jan 21 18:03:12 crc kubenswrapper[4792]: I0121 18:03:12.530113 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Jan 21 18:03:12 crc kubenswrapper[4792]: I0121 18:03:12.676014 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Jan 21 18:03:12 crc kubenswrapper[4792]: I0121 18:03:12.817187 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Jan 21 18:03:13 crc kubenswrapper[4792]: I0121 18:03:13.099952 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Jan 21 18:03:13 crc kubenswrapper[4792]: I0121 18:03:13.119173 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Jan 21 18:03:13 crc kubenswrapper[4792]: I0121 18:03:13.160149 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Jan 21 18:03:13 crc kubenswrapper[4792]: I0121 18:03:13.285973 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-cluster-machine-approver_machine-approver-56656f9798-dvxl6_36a8e839-34d3-48ad-908e-4bcc8c996cca/machine-approver-controller/0.log"
Jan 21 18:03:13 crc kubenswrapper[4792]: I0121 18:03:13.286543 4792 generic.go:334] "Generic (PLEG): container finished" podID="36a8e839-34d3-48ad-908e-4bcc8c996cca" containerID="50c5e39fdf793557878d9d42b3ecf13cb7677cad4176bde1b1113f8b96d5c83f" exitCode=255
Jan 21 18:03:13 crc kubenswrapper[4792]: I0121 18:03:13.286608 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dvxl6" event={"ID":"36a8e839-34d3-48ad-908e-4bcc8c996cca","Type":"ContainerDied","Data":"50c5e39fdf793557878d9d42b3ecf13cb7677cad4176bde1b1113f8b96d5c83f"}
Jan 21 18:03:13 crc kubenswrapper[4792]: I0121 18:03:13.287267 4792 scope.go:117] "RemoveContainer" containerID="50c5e39fdf793557878d9d42b3ecf13cb7677cad4176bde1b1113f8b96d5c83f"
Jan 21 18:03:13 crc kubenswrapper[4792]: I0121 18:03:13.510565 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Jan 21 18:03:13 crc kubenswrapper[4792]: I0121 18:03:13.753912 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Jan 21 18:03:13 crc kubenswrapper[4792]: I0121 18:03:13.766165 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Jan 21 18:03:13 crc kubenswrapper[4792]: I0121 18:03:13.776602 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Jan 21 18:03:13 crc kubenswrapper[4792]: I0121 18:03:13.894109 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Jan 21 18:03:14 crc kubenswrapper[4792]: I0121 18:03:14.178132 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Jan 21 18:03:14 crc kubenswrapper[4792]: I0121 18:03:14.218190 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Jan 21 18:03:14 crc kubenswrapper[4792]: I0121 18:03:14.231708 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Jan 21 18:03:14 crc kubenswrapper[4792]: I0121 18:03:14.296383 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-cluster-machine-approver_machine-approver-56656f9798-dvxl6_36a8e839-34d3-48ad-908e-4bcc8c996cca/machine-approver-controller/0.log"
Jan 21 18:03:14 crc kubenswrapper[4792]: I0121 18:03:14.297013 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dvxl6" event={"ID":"36a8e839-34d3-48ad-908e-4bcc8c996cca","Type":"ContainerStarted","Data":"7162481a0c2b3f4c60474c5c8a75c59f1094db6fd60187fab71a8f1ef270d2be"}
Jan 21 18:03:14 crc kubenswrapper[4792]: I0121 18:03:14.433970 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Jan 21 18:03:14 crc kubenswrapper[4792]: I0121 18:03:14.576222 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Jan 21 18:03:14 crc kubenswrapper[4792]: I0121 18:03:14.664994 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Jan 21 18:03:14 crc kubenswrapper[4792]: I0121 18:03:14.741300 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Jan 21 18:03:14 crc kubenswrapper[4792]: I0121 18:03:14.793616 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Jan 21 18:03:14 crc kubenswrapper[4792]: I0121 18:03:14.802423 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Jan 21 18:03:14 crc kubenswrapper[4792]: I0121 18:03:14.868042 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Jan 21 18:03:14 crc kubenswrapper[4792]: I0121 18:03:14.953797 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Jan 21 18:03:15 crc kubenswrapper[4792]: I0121 18:03:15.290521 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Jan 21 18:03:15 crc kubenswrapper[4792]: I0121 18:03:15.305413 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-r2mgl_4a02b713-1a2c-43d9-9ed2-de57e40a2364/control-plane-machine-set-operator/0.log"
Jan 21 18:03:15 crc kubenswrapper[4792]: I0121 18:03:15.305472 4792 generic.go:334] "Generic (PLEG): container finished" podID="4a02b713-1a2c-43d9-9ed2-de57e40a2364" containerID="0222ee1077b295cdd6194355ea2ea13a9a34bd19225c372c0d1afe1feb57fc89" exitCode=1
Jan 21 18:03:15 crc kubenswrapper[4792]: I0121 18:03:15.305516 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-r2mgl" event={"ID":"4a02b713-1a2c-43d9-9ed2-de57e40a2364","Type":"ContainerDied","Data":"0222ee1077b295cdd6194355ea2ea13a9a34bd19225c372c0d1afe1feb57fc89"}
Jan 21 18:03:15 crc kubenswrapper[4792]: I0121 18:03:15.306125 4792 scope.go:117] "RemoveContainer" containerID="0222ee1077b295cdd6194355ea2ea13a9a34bd19225c372c0d1afe1feb57fc89"
Jan 21 18:03:15 crc kubenswrapper[4792]: I0121 18:03:15.511921 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Jan 21 18:03:15 crc kubenswrapper[4792]: I0121 18:03:15.685086 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Jan 21 18:03:15 crc kubenswrapper[4792]: I0121 18:03:15.879995 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Jan 21 18:03:16 crc kubenswrapper[4792]: I0121 18:03:16.159221 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Jan 21 18:03:16 crc kubenswrapper[4792]: I0121 18:03:16.313385 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-r2mgl_4a02b713-1a2c-43d9-9ed2-de57e40a2364/control-plane-machine-set-operator/0.log"
Jan 21 18:03:16 crc kubenswrapper[4792]: I0121 18:03:16.313458 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-r2mgl" event={"ID":"4a02b713-1a2c-43d9-9ed2-de57e40a2364","Type":"ContainerStarted","Data":"c6afa10360a7532b31fd6ac25c5a921277f77f8173cf2c9f62273879f4ab614c"}
Jan 21 18:03:16 crc kubenswrapper[4792]: I0121 18:03:16.330665 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Jan 21 18:03:16 crc kubenswrapper[4792]: I0121 18:03:16.497321 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Jan 21 18:03:16 crc kubenswrapper[4792]: I0121 18:03:16.718530 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Jan 21 18:03:16 crc kubenswrapper[4792]: I0121 18:03:16.755350 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Jan 21 18:03:16 crc kubenswrapper[4792]: I0121 18:03:16.772136 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Jan 21 18:03:16 crc kubenswrapper[4792]: I0121 18:03:16.800328 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Jan 21 18:03:16 crc kubenswrapper[4792]: I0121 18:03:16.801070 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Jan 21 18:03:16 crc kubenswrapper[4792]: I0121 18:03:16.868024 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Jan 21 18:03:17 crc kubenswrapper[4792]: I0121 18:03:17.118214 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
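
The PLEG "container finished" entries in this stretch carry three different exit codes: 0 (controller-manager, a clean exit), 1 (control-plane-machine-set-operator, an application error) and 255 (machine-approver-controller); 137 appears later in the log. Codes above 128 follow the POSIX 128+signal convention. A small, illustrative decoder (not kubelet code):

// exitcodes.go — decodes the exitCode values recorded by the PLEG entries.
// The 128+signal reading is the usual POSIX shell convention; codes such
// as 255 are plain application exit statuses.
package main

import "fmt"

func describe(code int) string {
	switch {
	case code == 0:
		return "clean exit"
	case code > 128 && code <= 128+31:
		return fmt.Sprintf("terminated by signal %d (128+n)", code-128)
	default:
		return fmt.Sprintf("application error (exit status %d)", code)
	}
}

func main() {
	for _, c := range []int{0, 1, 137, 255} {
		fmt.Printf("exitCode=%d: %s\n", c, describe(c))
	}
}

Here 137 decodes to signal 9 (SIGKILL), which matches the forced kill of the startup-monitor container recorded further down.
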
Jan 21 18:03:17 crc kubenswrapper[4792]: I0121 18:03:17.196273 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Jan 21 18:03:17 crc kubenswrapper[4792]: I0121 18:03:17.292042 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Jan 21 18:03:17 crc kubenswrapper[4792]: I0121 18:03:17.324571 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt"
Jan 21 18:03:17 crc kubenswrapper[4792]: I0121 18:03:17.354194 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Jan 21 18:03:17 crc kubenswrapper[4792]: I0121 18:03:17.357367 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Jan 21 18:03:17 crc kubenswrapper[4792]: I0121 18:03:17.368645 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Jan 21 18:03:17 crc kubenswrapper[4792]: I0121 18:03:17.381248 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Jan 21 18:03:17 crc kubenswrapper[4792]: I0121 18:03:17.466142 4792 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Jan 21 18:03:17 crc kubenswrapper[4792]: I0121 18:03:17.466587 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://5ba01c5f975d9904ab03c08ba78e1d6a0cb8ad3cde1c0351385a6001fc108b50" gracePeriod=5
Jan 21 18:03:17 crc kubenswrapper[4792]: I0121 18:03:17.587253 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Jan 21 18:03:17 crc kubenswrapper[4792]: I0121 18:03:17.802482 4792 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Jan 21 18:03:17 crc kubenswrapper[4792]: I0121 18:03:17.805604 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Jan 21 18:03:17 crc kubenswrapper[4792]: I0121 18:03:17.994355 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Jan 21 18:03:18 crc kubenswrapper[4792]: I0121 18:03:18.017936 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Jan 21 18:03:18 crc kubenswrapper[4792]: I0121 18:03:18.132078 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Jan 21 18:03:18 crc kubenswrapper[4792]: I0121 18:03:18.595927 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Jan 21 18:03:18 crc kubenswrapper[4792]: I0121 18:03:18.644210 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Jan 21 18:03:18 crc kubenswrapper[4792]: I0121 18:03:18.682368 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Jan 21 18:03:18 crc kubenswrapper[4792]: I0121 18:03:18.777977 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Jan 21 18:03:18 crc kubenswrapper[4792]: I0121 18:03:18.821244 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Jan 21 18:03:19 crc kubenswrapper[4792]: I0121 18:03:19.036204 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Jan 21 18:03:19 crc kubenswrapper[4792]: I0121 18:03:19.102699 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Jan 21 18:03:19 crc kubenswrapper[4792]: I0121 18:03:19.385102 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Jan 21 18:03:19 crc kubenswrapper[4792]: I0121 18:03:19.486186 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Jan 21 18:03:19 crc kubenswrapper[4792]: I0121 18:03:19.509245 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Jan 21 18:03:19 crc kubenswrapper[4792]: I0121 18:03:19.510734 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Jan 21 18:03:19 crc kubenswrapper[4792]: I0121 18:03:19.665565 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Jan 21 18:03:19 crc kubenswrapper[4792]: I0121 18:03:19.681381 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Jan 21 18:03:19 crc kubenswrapper[4792]: I0121 18:03:19.691114 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Jan 21 18:03:20 crc kubenswrapper[4792]: I0121 18:03:20.015628 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Jan 21 18:03:20 crc kubenswrapper[4792]: I0121 18:03:20.063979 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Jan 21 18:03:20 crc kubenswrapper[4792]: I0121 18:03:20.085898 4792 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Jan 21 18:03:20 crc kubenswrapper[4792]: I0121 18:03:20.164828 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Jan 21 18:03:20 crc kubenswrapper[4792]: I0121 18:03:20.358097 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Jan 21 18:03:20 crc kubenswrapper[4792]: I0121 18:03:20.441338 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Jan 21 18:03:20 crc kubenswrapper[4792]: I0121 18:03:20.492260 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Jan 21 18:03:20 crc kubenswrapper[4792]: I0121 18:03:20.607584 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Jan 21 18:03:20 crc kubenswrapper[4792]: I0121 18:03:20.633886 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Jan 21 18:03:20 crc kubenswrapper[4792]: I0121 18:03:20.682950 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Jan 21 18:03:20 crc kubenswrapper[4792]: I0121 18:03:20.755454 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Jan 21 18:03:20 crc kubenswrapper[4792]: I0121 18:03:20.899991 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Jan 21 18:03:20 crc kubenswrapper[4792]: I0121 18:03:20.981193 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Jan 21 18:03:21 crc kubenswrapper[4792]: I0121 18:03:21.096206 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Jan 21 18:03:21 crc kubenswrapper[4792]: I0121 18:03:21.243242 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Jan 21 18:03:21 crc kubenswrapper[4792]: I0121 18:03:21.378480 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Jan 21 18:03:21 crc kubenswrapper[4792]: I0121 18:03:21.409522 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Jan 21 18:03:21 crc kubenswrapper[4792]: I0121 18:03:21.435151 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Jan 21 18:03:21 crc kubenswrapper[4792]: I0121 18:03:21.512269 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Jan 21 18:03:21 crc kubenswrapper[4792]: I0121 18:03:21.540959 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Jan 21 18:03:21 crc kubenswrapper[4792]: I0121 18:03:21.541329 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Jan 21 18:03:21 crc kubenswrapper[4792]: I0121 18:03:21.646763 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Jan 21 18:03:21 crc kubenswrapper[4792]: I0121 18:03:21.902138 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Jan 21 18:03:21 crc kubenswrapper[4792]: I0121 18:03:21.957269 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Jan 21 18:03:22 crc kubenswrapper[4792]: I0121 18:03:22.494582 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Jan 21 18:03:22 crc kubenswrapper[4792]: I0121 18:03:22.556427 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
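
The long runs of "Caches populated" entries come from reflector.go in client-go: for every ConfigMap/Secret a pod references, the kubelet runs an informer whose reflector lists and watches the object, and logs once the local cache has synced. A sketch of that pattern as a client-go consumer would write it (assumes in-cluster credentials; not kubelet code):

// cachesync.go — sketch of the client-go pattern behind the
// "Caches populated for *v1.ConfigMap ..." reflector entries: start a
// shared informer and block until its initial list/watch has synced.
package main

import (
	"context"
	"fmt"

	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
	"k8s.io/client-go/tools/cache"
)

func main() {
	cfg, err := rest.InClusterConfig()
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	// A resync period of 0 disables periodic resyncs; the watch keeps the
	// cache current after the initial list.
	factory := informers.NewSharedInformerFactory(client, 0)
	cmInformer := factory.Core().V1().ConfigMaps().Informer()

	ctx := context.Background()
	factory.Start(ctx.Done())
	if !cache.WaitForCacheSync(ctx.Done(), cmInformer.HasSynced) {
		panic("ConfigMap cache never synced")
	}
	fmt.Println("caches populated for *v1.ConfigMap")
}

The density of these entries here suggests the kubelet recently restarted and is rebuilding all of its per-pod object caches at once.
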
Jan 21 18:03:22 crc kubenswrapper[4792]: I0121 18:03:22.832948 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Jan 21 18:03:22 crc kubenswrapper[4792]: I0121 18:03:22.922469 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.084144 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.186731 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.186910 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.236805 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.236936 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.237026 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.237072 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.237086 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.237110 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.237119 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.237268 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.237345 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.238110 4792 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\""
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.238157 4792 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\""
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.238177 4792 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\""
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.238196 4792 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\""
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.246524 4792 scope.go:117] "RemoveContainer" containerID="827aeb26800964cccb2fdd5cd9e5498dd910f8ddadacfcd890a5882ca7bba6e2"
Jan 21 18:03:23 crc kubenswrapper[4792]: E0121 18:03:23.247281 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 40s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-bdt7b_openshift-marketplace(a15d3491-6301-49f2-a196-df5db956aa82)\"" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" podUID="a15d3491-6301-49f2-a196-df5db956aa82"
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.248806 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.339836 4792 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\""
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.343767 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.391467 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.404173 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.404231 4792 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="5ba01c5f975d9904ab03c08ba78e1d6a0cb8ad3cde1c0351385a6001fc108b50" exitCode=137
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.404279 4792 scope.go:117] "RemoveContainer" containerID="5ba01c5f975d9904ab03c08ba78e1d6a0cb8ad3cde1c0351385a6001fc108b50"
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.404323 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.428101 4792 scope.go:117] "RemoveContainer" containerID="5ba01c5f975d9904ab03c08ba78e1d6a0cb8ad3cde1c0351385a6001fc108b50"
Jan 21 18:03:23 crc kubenswrapper[4792]: E0121 18:03:23.428459 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ba01c5f975d9904ab03c08ba78e1d6a0cb8ad3cde1c0351385a6001fc108b50\": container with ID starting with 5ba01c5f975d9904ab03c08ba78e1d6a0cb8ad3cde1c0351385a6001fc108b50 not found: ID does not exist" containerID="5ba01c5f975d9904ab03c08ba78e1d6a0cb8ad3cde1c0351385a6001fc108b50"
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.428572 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ba01c5f975d9904ab03c08ba78e1d6a0cb8ad3cde1c0351385a6001fc108b50"} err="failed to get container status \"5ba01c5f975d9904ab03c08ba78e1d6a0cb8ad3cde1c0351385a6001fc108b50\": rpc error: code = NotFound desc = could not find container \"5ba01c5f975d9904ab03c08ba78e1d6a0cb8ad3cde1c0351385a6001fc108b50\": container with ID starting with 5ba01c5f975d9904ab03c08ba78e1d6a0cb8ad3cde1c0351385a6001fc108b50 not found: ID does not exist"
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.570012 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.570073 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
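
The startup-monitor teardown above shows the kill sequence implied by "Killing container with a grace period ... gracePeriod=5" followed by exitCode=137: the runtime sends SIGTERM, waits out the grace period, then sends SIGKILL (137 = 128+9). A process-level sketch of that sequence, assuming a plain child process rather than the real kubelet/CRI-O path:

// gracekill.go — illustrative TERM-then-KILL sequence matching the
// "Killing container with a grace period" / exitCode=137 entries above.
// A sketch at the OS-process level, not the kubelet/CRI-O code path.
package main

import (
	"os/exec"
	"syscall"
	"time"
)

func killWithGrace(cmd *exec.Cmd, grace time.Duration) {
	_ = cmd.Process.Signal(syscall.SIGTERM) // polite shutdown request
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	select {
	case <-done:
		// exited within the grace period
	case <-time.After(grace):
		_ = cmd.Process.Kill() // SIGKILL; shells report this as exit 137
		<-done
	}
}

func main() {
	cmd := exec.Command("sleep", "60")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	killWithGrace(cmd, 5*time.Second)
}

The NotFound errors right after the kill are a benign race: the kubelet asks the runtime to delete a container that garbage collection has already removed.
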
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.619237 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.808157 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.923014 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Jan 21 18:03:23 crc kubenswrapper[4792]: I0121 18:03:23.963895 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Jan 21 18:03:24 crc kubenswrapper[4792]: I0121 18:03:24.025097 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Jan 21 18:03:24 crc kubenswrapper[4792]: I0121 18:03:24.253356 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes"
Jan 21 18:03:24 crc kubenswrapper[4792]: I0121 18:03:24.253599 4792 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID=""
Jan 21 18:03:24 crc kubenswrapper[4792]: I0121 18:03:24.264954 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Jan 21 18:03:24 crc kubenswrapper[4792]: I0121 18:03:24.265003 4792 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="9c06611c-05e5-4a33-8c11-b41ad298044e"
Jan 21 18:03:24 crc kubenswrapper[4792]: I0121 18:03:24.269055 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Jan 21 18:03:24 crc kubenswrapper[4792]: I0121 18:03:24.269118 4792 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="9c06611c-05e5-4a33-8c11-b41ad298044e"
Jan 21 18:03:24 crc kubenswrapper[4792]: I0121 18:03:24.349819 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Jan 21 18:03:24 crc kubenswrapper[4792]: I0121 18:03:24.419501 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Jan 21 18:03:24 crc kubenswrapper[4792]: I0121 18:03:24.457100 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Jan 21 18:03:24 crc kubenswrapper[4792]: I0121 18:03:24.457675 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Jan 21 18:03:24 crc kubenswrapper[4792]: I0121 18:03:24.587193 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Jan 21 18:03:24 crc kubenswrapper[4792]: I0121 18:03:24.983061 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Jan 21 18:03:25 crc kubenswrapper[4792]: I0121 18:03:25.379215 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt"
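
"Cleaned up orphaned pod volumes dir" is the kubelet's last step for the deleted startup-monitor pod: with every volume torn down, the per-pod volumes directory can be removed, after which the API-side mirror pod for the static pod is deleted. A hypothetical sweep in that spirit (path layout copied from the log entry; the helper name and logic are invented for illustration):

// orphanvolumes.go — hypothetical cleanup matching the "Cleaned up
// orphaned pod volumes dir" entry: only remove the volumes dir once no
// per-volume directories remain under it. Not the kubelet implementation.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func cleanupOrphanedVolumes(podUID string) error {
	dir := filepath.Join("/var/lib/kubelet/pods", podUID, "volumes")
	entries, err := os.ReadDir(dir)
	if err != nil {
		return err
	}
	if len(entries) > 0 {
		return fmt.Errorf("%d volume(s) still present, skipping cleanup", len(entries))
	}
	return os.RemoveAll(dir)
}

func main() {
	if err := cleanupOrphanedVolumes("f85e55b1a89d02b0cb034b1ea31ed45a"); err != nil {
		fmt.Println(err)
	}
}

The "Unable to find pod for mirror pod, skipping" entries that follow are expected: the static pod was already removed from the file source, so the mirror pod's DELETE/REMOVE events have no local pod left to match.
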
Jan 21 18:03:25 crc kubenswrapper[4792]: I0121 18:03:25.530265 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Jan 21 18:03:25 crc kubenswrapper[4792]: I0121 18:03:25.834694 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Jan 21 18:03:26 crc kubenswrapper[4792]: I0121 18:03:26.247171 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Jan 21 18:03:27 crc kubenswrapper[4792]: I0121 18:03:27.332401 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Jan 21 18:03:27 crc kubenswrapper[4792]: I0121 18:03:27.649698 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Jan 21 18:03:27 crc kubenswrapper[4792]: I0121 18:03:27.681027 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Jan 21 18:03:27 crc kubenswrapper[4792]: I0121 18:03:27.889448 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Jan 21 18:03:28 crc kubenswrapper[4792]: I0121 18:03:28.152406 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Jan 21 18:03:28 crc kubenswrapper[4792]: I0121 18:03:28.180996 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Jan 21 18:03:28 crc kubenswrapper[4792]: I0121 18:03:28.604940 4792 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Jan 21 18:03:28 crc kubenswrapper[4792]: I0121 18:03:28.669282 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Jan 21 18:03:29 crc kubenswrapper[4792]: I0121 18:03:29.579746 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Jan 21 18:03:29 crc kubenswrapper[4792]: I0121 18:03:29.655516 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6"
Jan 21 18:03:29 crc kubenswrapper[4792]: I0121 18:03:29.940525 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Jan 21 18:03:30 crc kubenswrapper[4792]: I0121 18:03:30.270629 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Jan 21 18:03:30 crc kubenswrapper[4792]: I0121 18:03:30.274636 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Jan 21 18:03:30 crc kubenswrapper[4792]: I0121 18:03:30.985691 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Jan 21 18:03:31 crc kubenswrapper[4792]: I0121 18:03:31.370015 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Jan 21 18:03:32 crc kubenswrapper[4792]: I0121 18:03:32.337053 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Jan 21 18:03:32 crc kubenswrapper[4792]: I0121 18:03:32.369807 4792 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Jan 21 18:03:32 crc kubenswrapper[4792]: I0121 18:03:32.538307 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Jan 21 18:03:32 crc kubenswrapper[4792]: I0121 18:03:32.734949 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Jan 21 18:03:32 crc kubenswrapper[4792]: I0121 18:03:32.751913 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Jan 21 18:03:33 crc kubenswrapper[4792]: I0121 18:03:33.149712 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Jan 21 18:03:33 crc kubenswrapper[4792]: I0121 18:03:33.189371 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Jan 21 18:03:33 crc kubenswrapper[4792]: I0121 18:03:33.287627 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Jan 21 18:03:33 crc kubenswrapper[4792]: I0121 18:03:33.364776 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Jan 21 18:03:33 crc kubenswrapper[4792]: I0121 18:03:33.768908 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Jan 21 18:03:34 crc kubenswrapper[4792]: I0121 18:03:34.676402 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Jan 21 18:03:34 crc kubenswrapper[4792]: I0121 18:03:34.689139 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Jan 21 18:03:35 crc kubenswrapper[4792]: I0121 18:03:35.010430 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Jan 21 18:03:35 crc kubenswrapper[4792]: I0121 18:03:35.048103 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Jan 21 18:03:35 crc kubenswrapper[4792]: I0121 18:03:35.247325 4792 scope.go:117] "RemoveContainer" containerID="827aeb26800964cccb2fdd5cd9e5498dd910f8ddadacfcd890a5882ca7bba6e2"
Jan 21 18:03:35 crc kubenswrapper[4792]: E0121 18:03:35.247777 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 40s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-bdt7b_openshift-marketplace(a15d3491-6301-49f2-a196-df5db956aa82)\"" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" podUID="a15d3491-6301-49f2-a196-df5db956aa82"
Jan 21 18:03:35 crc kubenswrapper[4792]: I0121 18:03:35.650333 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Jan 21 18:03:35 crc kubenswrapper[4792]: I0121 18:03:35.917288 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Jan 21 18:03:36 crc kubenswrapper[4792]: I0121 18:03:36.315003 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Jan 21 18:03:36 crc kubenswrapper[4792]: I0121 18:03:36.624109 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Jan 21 18:03:37 crc kubenswrapper[4792]: I0121 18:03:37.734530 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Jan 21 18:03:37 crc kubenswrapper[4792]: I0121 18:03:37.825076 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Jan 21 18:03:38 crc kubenswrapper[4792]: I0121 18:03:38.143074 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Jan 21 18:03:39 crc kubenswrapper[4792]: I0121 18:03:39.453572 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Jan 21 18:03:39 crc kubenswrapper[4792]: I0121 18:03:39.753509 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Jan 21 18:03:39 crc kubenswrapper[4792]: I0121 18:03:39.941970 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Jan 21 18:03:43 crc kubenswrapper[4792]: I0121 18:03:43.132714 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Jan 21 18:03:43 crc kubenswrapper[4792]: I0121 18:03:43.438337 4792 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Jan 21 18:03:46 crc kubenswrapper[4792]: I0121 18:03:46.249479 4792 scope.go:117] "RemoveContainer" containerID="827aeb26800964cccb2fdd5cd9e5498dd910f8ddadacfcd890a5882ca7bba6e2"
Jan 21 18:03:46 crc kubenswrapper[4792]: I0121 18:03:46.558273 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-bdt7b_a15d3491-6301-49f2-a196-df5db956aa82/marketplace-operator/3.log"
Jan 21 18:03:46 crc kubenswrapper[4792]: I0121 18:03:46.558362 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" event={"ID":"a15d3491-6301-49f2-a196-df5db956aa82","Type":"ContainerStarted","Data":"e8defabfb923c96a2ce9b08b239072c43fa7acc56cf255a6348ada3d81de6e1c"}
Jan 21 18:03:46 crc kubenswrapper[4792]: I0121 18:03:46.559430 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b"
Jan 21 18:03:46 crc kubenswrapper[4792]: I0121 18:03:46.561695 4792 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-bdt7b container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" start-of-body=
Jan 21 18:03:46 crc kubenswrapper[4792]: I0121 18:03:46.561757 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" podUID="a15d3491-6301-49f2-a196-df5db956aa82" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused"
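
The "back-off 40s" errors for marketplace-operator stop here: the first back-off error is logged at 18:03:04 and the replacement container starts at 18:03:46, consistent with the 40s delay expiring. A back-of-envelope reconstruction of the schedule, assuming the upstream kubelet defaults (10s initial delay, doubling per failed restart, capped at 5m; 40s would be the third step):

// crashloop.go — illustrative CrashLoopBackOff schedule under assumed
// upstream kubelet defaults; constants are not read from this cluster.
package main

import (
	"fmt"
	"time"
)

func main() {
	delay := 10 * time.Second
	const maxBackoff = 5 * time.Minute
	for attempt := 1; attempt <= 6; attempt++ {
		fmt.Printf("restart attempt %d: back-off %s\n", attempt, delay)
		delay *= 2
		if delay > maxBackoff {
			delay = maxBackoff
		}
	}
}

The readiness failure immediately after ContainerStarted is the normal race between container start and the process binding its port; the probe goes ready moments later.
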
I0121 18:03:47.196593 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 21 18:03:47 crc kubenswrapper[4792]: I0121 18:03:47.442129 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 21 18:03:47 crc kubenswrapper[4792]: I0121 18:03:47.566361 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" Jan 21 18:03:53 crc kubenswrapper[4792]: I0121 18:03:53.570594 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:03:53 crc kubenswrapper[4792]: I0121 18:03:53.571166 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:03:53 crc kubenswrapper[4792]: I0121 18:03:53.571234 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" Jan 21 18:03:53 crc kubenswrapper[4792]: I0121 18:03:53.572088 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"257c9e74cbb9b9f92ff3f280789fa1e4b06398d7e5151781ef0a5144cab14c51"} pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 18:03:53 crc kubenswrapper[4792]: I0121 18:03:53.572155 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" containerID="cri-o://257c9e74cbb9b9f92ff3f280789fa1e4b06398d7e5151781ef0a5144cab14c51" gracePeriod=600 Jan 21 18:03:54 crc kubenswrapper[4792]: I0121 18:03:54.624995 4792 generic.go:334] "Generic (PLEG): container finished" podID="759f2e21-e44e-4049-b262-cb49448e22ab" containerID="257c9e74cbb9b9f92ff3f280789fa1e4b06398d7e5151781ef0a5144cab14c51" exitCode=0 Jan 21 18:03:54 crc kubenswrapper[4792]: I0121 18:03:54.625677 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerDied","Data":"257c9e74cbb9b9f92ff3f280789fa1e4b06398d7e5151781ef0a5144cab14c51"} Jan 21 18:03:54 crc kubenswrapper[4792]: I0121 18:03:54.625715 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerStarted","Data":"4c628e700d7806ef45f8526a77baeb62e6d9e6f405ad08f7dde704d18179d214"} Jan 21 18:03:54 crc kubenswrapper[4792]: I0121 18:03:54.625732 4792 scope.go:117] "RemoveContainer" containerID="f7e7b6843f867e0fab8360dffb642074b774b0c7bf4cb93e2360489ba08ecdba" Jan 21 18:03:54 crc kubenswrapper[4792]: I0121 18:03:54.948936 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-controller-manager/controller-manager-879f6c89f-dz4cq"] Jan 21 18:03:54 crc kubenswrapper[4792]: I0121 18:03:54.949228 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" podUID="3afd98a4-e47a-4e22-ab23-0cbf1bf56e46" containerName="controller-manager" containerID="cri-o://a52850c529f806319022e0d1f5b8ced234f9c96eff56fbdd68cb1682f8092b27" gracePeriod=30 Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.039100 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5"] Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.039404 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" podUID="39822312-6707-4de1-8cc6-5ab1f513ebf7" containerName="route-controller-manager" containerID="cri-o://71dfc512f4e5d44369584af3406534ddf824d7bab621f74ebd63289d0b38dd8c" gracePeriod=30 Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.504602 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.572778 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.603511 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-proxy-ca-bundles\") pod \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\" (UID: \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\") " Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.603593 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wf55v\" (UniqueName: \"kubernetes.io/projected/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-kube-api-access-wf55v\") pod \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\" (UID: \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\") " Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.604827 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "3afd98a4-e47a-4e22-ab23-0cbf1bf56e46" (UID: "3afd98a4-e47a-4e22-ab23-0cbf1bf56e46"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.605257 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-serving-cert\") pod \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\" (UID: \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\") " Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.605318 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-config\") pod \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\" (UID: \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\") " Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.607212 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-config" (OuterVolumeSpecName: "config") pod "3afd98a4-e47a-4e22-ab23-0cbf1bf56e46" (UID: "3afd98a4-e47a-4e22-ab23-0cbf1bf56e46"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.612526 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "3afd98a4-e47a-4e22-ab23-0cbf1bf56e46" (UID: "3afd98a4-e47a-4e22-ab23-0cbf1bf56e46"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.612590 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-kube-api-access-wf55v" (OuterVolumeSpecName: "kube-api-access-wf55v") pod "3afd98a4-e47a-4e22-ab23-0cbf1bf56e46" (UID: "3afd98a4-e47a-4e22-ab23-0cbf1bf56e46"). InnerVolumeSpecName "kube-api-access-wf55v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.612795 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-client-ca\") pod \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\" (UID: \"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46\") " Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.613382 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-client-ca" (OuterVolumeSpecName: "client-ca") pod "3afd98a4-e47a-4e22-ab23-0cbf1bf56e46" (UID: "3afd98a4-e47a-4e22-ab23-0cbf1bf56e46"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.613792 4792 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.613811 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wf55v\" (UniqueName: \"kubernetes.io/projected/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-kube-api-access-wf55v\") on node \"crc\" DevicePath \"\"" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.613824 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.613833 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-config\") on node \"crc\" DevicePath \"\"" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.613854 4792 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46-client-ca\") on node \"crc\" DevicePath \"\"" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.639337 4792 generic.go:334] "Generic (PLEG): container finished" podID="3afd98a4-e47a-4e22-ab23-0cbf1bf56e46" containerID="a52850c529f806319022e0d1f5b8ced234f9c96eff56fbdd68cb1682f8092b27" exitCode=0 Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.639410 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" event={"ID":"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46","Type":"ContainerDied","Data":"a52850c529f806319022e0d1f5b8ced234f9c96eff56fbdd68cb1682f8092b27"} Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.639442 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" event={"ID":"3afd98a4-e47a-4e22-ab23-0cbf1bf56e46","Type":"ContainerDied","Data":"87b9569dfdf626a2abeb1a0ee7585c2a639feb30c0d1ba36e7f194b8ab04aa8b"} Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.639466 4792 scope.go:117] "RemoveContainer" containerID="a52850c529f806319022e0d1f5b8ced234f9c96eff56fbdd68cb1682f8092b27" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.639628 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-dz4cq" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.643778 4792 generic.go:334] "Generic (PLEG): container finished" podID="39822312-6707-4de1-8cc6-5ab1f513ebf7" containerID="71dfc512f4e5d44369584af3406534ddf824d7bab621f74ebd63289d0b38dd8c" exitCode=0 Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.643996 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" event={"ID":"39822312-6707-4de1-8cc6-5ab1f513ebf7","Type":"ContainerDied","Data":"71dfc512f4e5d44369584af3406534ddf824d7bab621f74ebd63289d0b38dd8c"} Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.644077 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" event={"ID":"39822312-6707-4de1-8cc6-5ab1f513ebf7","Type":"ContainerDied","Data":"3a7156982ed014c57ac0fb01f847336ab3a2c69d4e064ff270c19db15b7fb1b2"} Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.644109 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.669079 4792 scope.go:117] "RemoveContainer" containerID="553770c21b346fdab8c4b42c921e8e972ffbf6cf9549dcfb66f2d2d4ad885615" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.671710 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dz4cq"] Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.675805 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dz4cq"] Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.704207 4792 scope.go:117] "RemoveContainer" containerID="a52850c529f806319022e0d1f5b8ced234f9c96eff56fbdd68cb1682f8092b27" Jan 21 18:03:55 crc kubenswrapper[4792]: E0121 18:03:55.705352 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a52850c529f806319022e0d1f5b8ced234f9c96eff56fbdd68cb1682f8092b27\": container with ID starting with a52850c529f806319022e0d1f5b8ced234f9c96eff56fbdd68cb1682f8092b27 not found: ID does not exist" containerID="a52850c529f806319022e0d1f5b8ced234f9c96eff56fbdd68cb1682f8092b27" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.705394 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a52850c529f806319022e0d1f5b8ced234f9c96eff56fbdd68cb1682f8092b27"} err="failed to get container status \"a52850c529f806319022e0d1f5b8ced234f9c96eff56fbdd68cb1682f8092b27\": rpc error: code = NotFound desc = could not find container \"a52850c529f806319022e0d1f5b8ced234f9c96eff56fbdd68cb1682f8092b27\": container with ID starting with a52850c529f806319022e0d1f5b8ced234f9c96eff56fbdd68cb1682f8092b27 not found: ID does not exist" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.705422 4792 scope.go:117] "RemoveContainer" containerID="553770c21b346fdab8c4b42c921e8e972ffbf6cf9549dcfb66f2d2d4ad885615" Jan 21 18:03:55 crc kubenswrapper[4792]: E0121 18:03:55.705946 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"553770c21b346fdab8c4b42c921e8e972ffbf6cf9549dcfb66f2d2d4ad885615\": container with ID starting with 
553770c21b346fdab8c4b42c921e8e972ffbf6cf9549dcfb66f2d2d4ad885615 not found: ID does not exist" containerID="553770c21b346fdab8c4b42c921e8e972ffbf6cf9549dcfb66f2d2d4ad885615" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.705970 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"553770c21b346fdab8c4b42c921e8e972ffbf6cf9549dcfb66f2d2d4ad885615"} err="failed to get container status \"553770c21b346fdab8c4b42c921e8e972ffbf6cf9549dcfb66f2d2d4ad885615\": rpc error: code = NotFound desc = could not find container \"553770c21b346fdab8c4b42c921e8e972ffbf6cf9549dcfb66f2d2d4ad885615\": container with ID starting with 553770c21b346fdab8c4b42c921e8e972ffbf6cf9549dcfb66f2d2d4ad885615 not found: ID does not exist" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.705989 4792 scope.go:117] "RemoveContainer" containerID="71dfc512f4e5d44369584af3406534ddf824d7bab621f74ebd63289d0b38dd8c" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.715467 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/39822312-6707-4de1-8cc6-5ab1f513ebf7-client-ca\") pod \"39822312-6707-4de1-8cc6-5ab1f513ebf7\" (UID: \"39822312-6707-4de1-8cc6-5ab1f513ebf7\") " Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.715561 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39822312-6707-4de1-8cc6-5ab1f513ebf7-serving-cert\") pod \"39822312-6707-4de1-8cc6-5ab1f513ebf7\" (UID: \"39822312-6707-4de1-8cc6-5ab1f513ebf7\") " Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.715680 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ckwfz\" (UniqueName: \"kubernetes.io/projected/39822312-6707-4de1-8cc6-5ab1f513ebf7-kube-api-access-ckwfz\") pod \"39822312-6707-4de1-8cc6-5ab1f513ebf7\" (UID: \"39822312-6707-4de1-8cc6-5ab1f513ebf7\") " Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.715755 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39822312-6707-4de1-8cc6-5ab1f513ebf7-config\") pod \"39822312-6707-4de1-8cc6-5ab1f513ebf7\" (UID: \"39822312-6707-4de1-8cc6-5ab1f513ebf7\") " Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.717459 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39822312-6707-4de1-8cc6-5ab1f513ebf7-client-ca" (OuterVolumeSpecName: "client-ca") pod "39822312-6707-4de1-8cc6-5ab1f513ebf7" (UID: "39822312-6707-4de1-8cc6-5ab1f513ebf7"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.717491 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39822312-6707-4de1-8cc6-5ab1f513ebf7-config" (OuterVolumeSpecName: "config") pod "39822312-6707-4de1-8cc6-5ab1f513ebf7" (UID: "39822312-6707-4de1-8cc6-5ab1f513ebf7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.721301 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39822312-6707-4de1-8cc6-5ab1f513ebf7-kube-api-access-ckwfz" (OuterVolumeSpecName: "kube-api-access-ckwfz") pod "39822312-6707-4de1-8cc6-5ab1f513ebf7" (UID: "39822312-6707-4de1-8cc6-5ab1f513ebf7"). 
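
The paired E/I entries above ("ContainerStatus from runtime service failed ... NotFound" followed by "DeleteContainer returned error") show container removal racing with a container that is already gone; the runtime's gRPC NotFound is logged and then swallowed, because the desired state (container removed) already holds. A sketch of that idempotent-delete pattern using the real grpc status/codes API; removeFromRuntime is a stand-in for the actual CRI call:

// Idempotent container removal: treat codes.NotFound as success.
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

func removeFromRuntime(id string) error {
	// Stand-in: pretend the container is already gone.
	return status.Error(codes.NotFound, "could not find container "+id)
}

func removeContainer(id string) error {
	err := removeFromRuntime(id)
	if status.Code(err) == codes.NotFound {
		fmt.Printf("container %s already removed: %v\n", id, err)
		return nil // already in the desired state, nothing to retry
	}
	return err
}

func main() {
	if err := removeContainer("a52850c529f8"); err != nil {
		fmt.Println("remove failed:", err)
	}
}
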
InnerVolumeSpecName "kube-api-access-ckwfz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.721318 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39822312-6707-4de1-8cc6-5ab1f513ebf7-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "39822312-6707-4de1-8cc6-5ab1f513ebf7" (UID: "39822312-6707-4de1-8cc6-5ab1f513ebf7"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.727629 4792 scope.go:117] "RemoveContainer" containerID="71dfc512f4e5d44369584af3406534ddf824d7bab621f74ebd63289d0b38dd8c" Jan 21 18:03:55 crc kubenswrapper[4792]: E0121 18:03:55.728234 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71dfc512f4e5d44369584af3406534ddf824d7bab621f74ebd63289d0b38dd8c\": container with ID starting with 71dfc512f4e5d44369584af3406534ddf824d7bab621f74ebd63289d0b38dd8c not found: ID does not exist" containerID="71dfc512f4e5d44369584af3406534ddf824d7bab621f74ebd63289d0b38dd8c" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.728285 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71dfc512f4e5d44369584af3406534ddf824d7bab621f74ebd63289d0b38dd8c"} err="failed to get container status \"71dfc512f4e5d44369584af3406534ddf824d7bab621f74ebd63289d0b38dd8c\": rpc error: code = NotFound desc = could not find container \"71dfc512f4e5d44369584af3406534ddf824d7bab621f74ebd63289d0b38dd8c\": container with ID starting with 71dfc512f4e5d44369584af3406534ddf824d7bab621f74ebd63289d0b38dd8c not found: ID does not exist" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.818529 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39822312-6707-4de1-8cc6-5ab1f513ebf7-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.819112 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ckwfz\" (UniqueName: \"kubernetes.io/projected/39822312-6707-4de1-8cc6-5ab1f513ebf7-kube-api-access-ckwfz\") on node \"crc\" DevicePath \"\"" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.819159 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39822312-6707-4de1-8cc6-5ab1f513ebf7-config\") on node \"crc\" DevicePath \"\"" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.819178 4792 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/39822312-6707-4de1-8cc6-5ab1f513ebf7-client-ca\") on node \"crc\" DevicePath \"\"" Jan 21 18:03:55 crc kubenswrapper[4792]: I0121 18:03:55.993845 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5"] Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.000804 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-42ml5"] Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.277056 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39822312-6707-4de1-8cc6-5ab1f513ebf7" path="/var/lib/kubelet/pods/39822312-6707-4de1-8cc6-5ab1f513ebf7/volumes" Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.277626 4792 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3afd98a4-e47a-4e22-ab23-0cbf1bf56e46" path="/var/lib/kubelet/pods/3afd98a4-e47a-4e22-ab23-0cbf1bf56e46/volumes" Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.325571 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-drcjx"] Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.326020 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-drcjx" podUID="cc09a117-7791-479f-8477-46cd94afcc09" containerName="registry-server" containerID="cri-o://228a9e14f5c9c643cf4e9ebb7486d726a2ed1621ea7cc2ae0c732f0fc5476bd2" gracePeriod=2 Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.529309 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5bd5w"] Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.530324 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5bd5w" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" containerName="registry-server" containerID="cri-o://9386dce00c5baa1e699a4bf4e33c2aecdf1530639509a490228058a6f8dd8011" gracePeriod=2 Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.654910 4792 generic.go:334] "Generic (PLEG): container finished" podID="cc09a117-7791-479f-8477-46cd94afcc09" containerID="228a9e14f5c9c643cf4e9ebb7486d726a2ed1621ea7cc2ae0c732f0fc5476bd2" exitCode=0 Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.654978 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-drcjx" event={"ID":"cc09a117-7791-479f-8477-46cd94afcc09","Type":"ContainerDied","Data":"228a9e14f5c9c643cf4e9ebb7486d726a2ed1621ea7cc2ae0c732f0fc5476bd2"} Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.729252 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-drcjx" Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.775324 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wfvzm\" (UniqueName: \"kubernetes.io/projected/cc09a117-7791-479f-8477-46cd94afcc09-kube-api-access-wfvzm\") pod \"cc09a117-7791-479f-8477-46cd94afcc09\" (UID: \"cc09a117-7791-479f-8477-46cd94afcc09\") " Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.775387 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc09a117-7791-479f-8477-46cd94afcc09-catalog-content\") pod \"cc09a117-7791-479f-8477-46cd94afcc09\" (UID: \"cc09a117-7791-479f-8477-46cd94afcc09\") " Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.775410 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc09a117-7791-479f-8477-46cd94afcc09-utilities\") pod \"cc09a117-7791-479f-8477-46cd94afcc09\" (UID: \"cc09a117-7791-479f-8477-46cd94afcc09\") " Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.776584 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc09a117-7791-479f-8477-46cd94afcc09-utilities" (OuterVolumeSpecName: "utilities") pod "cc09a117-7791-479f-8477-46cd94afcc09" (UID: "cc09a117-7791-479f-8477-46cd94afcc09"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.785481 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc09a117-7791-479f-8477-46cd94afcc09-kube-api-access-wfvzm" (OuterVolumeSpecName: "kube-api-access-wfvzm") pod "cc09a117-7791-479f-8477-46cd94afcc09" (UID: "cc09a117-7791-479f-8477-46cd94afcc09"). InnerVolumeSpecName "kube-api-access-wfvzm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.820791 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc09a117-7791-479f-8477-46cd94afcc09-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cc09a117-7791-479f-8477-46cd94afcc09" (UID: "cc09a117-7791-479f-8477-46cd94afcc09"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.864309 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5bd5w" Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.876437 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c18e5f7c-5ebf-4166-82ab-c29a81232623-catalog-content\") pod \"c18e5f7c-5ebf-4166-82ab-c29a81232623\" (UID: \"c18e5f7c-5ebf-4166-82ab-c29a81232623\") " Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.876494 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h8l5j\" (UniqueName: \"kubernetes.io/projected/c18e5f7c-5ebf-4166-82ab-c29a81232623-kube-api-access-h8l5j\") pod \"c18e5f7c-5ebf-4166-82ab-c29a81232623\" (UID: \"c18e5f7c-5ebf-4166-82ab-c29a81232623\") " Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.876519 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c18e5f7c-5ebf-4166-82ab-c29a81232623-utilities\") pod \"c18e5f7c-5ebf-4166-82ab-c29a81232623\" (UID: \"c18e5f7c-5ebf-4166-82ab-c29a81232623\") " Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.876673 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wfvzm\" (UniqueName: \"kubernetes.io/projected/cc09a117-7791-479f-8477-46cd94afcc09-kube-api-access-wfvzm\") on node \"crc\" DevicePath \"\"" Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.876685 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc09a117-7791-479f-8477-46cd94afcc09-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.876694 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc09a117-7791-479f-8477-46cd94afcc09-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.878176 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c18e5f7c-5ebf-4166-82ab-c29a81232623-utilities" (OuterVolumeSpecName: "utilities") pod "c18e5f7c-5ebf-4166-82ab-c29a81232623" (UID: "c18e5f7c-5ebf-4166-82ab-c29a81232623"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.880990 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c18e5f7c-5ebf-4166-82ab-c29a81232623-kube-api-access-h8l5j" (OuterVolumeSpecName: "kube-api-access-h8l5j") pod "c18e5f7c-5ebf-4166-82ab-c29a81232623" (UID: "c18e5f7c-5ebf-4166-82ab-c29a81232623"). InnerVolumeSpecName "kube-api-access-h8l5j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.939280 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c18e5f7c-5ebf-4166-82ab-c29a81232623-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c18e5f7c-5ebf-4166-82ab-c29a81232623" (UID: "c18e5f7c-5ebf-4166-82ab-c29a81232623"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.978645 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c18e5f7c-5ebf-4166-82ab-c29a81232623-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.978695 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c18e5f7c-5ebf-4166-82ab-c29a81232623-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:03:56 crc kubenswrapper[4792]: I0121 18:03:56.978712 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h8l5j\" (UniqueName: \"kubernetes.io/projected/c18e5f7c-5ebf-4166-82ab-c29a81232623-kube-api-access-h8l5j\") on node \"crc\" DevicePath \"\"" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.066078 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6"] Jan 21 18:03:57 crc kubenswrapper[4792]: E0121 18:03:57.066513 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" containerName="extract-utilities" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.066535 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" containerName="extract-utilities" Jan 21 18:03:57 crc kubenswrapper[4792]: E0121 18:03:57.066545 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" containerName="installer" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.066553 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" containerName="installer" Jan 21 18:03:57 crc kubenswrapper[4792]: E0121 18:03:57.066565 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3afd98a4-e47a-4e22-ab23-0cbf1bf56e46" containerName="controller-manager" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.066573 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3afd98a4-e47a-4e22-ab23-0cbf1bf56e46" containerName="controller-manager" Jan 21 18:03:57 crc kubenswrapper[4792]: E0121 18:03:57.066586 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc09a117-7791-479f-8477-46cd94afcc09" containerName="extract-content" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.066595 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc09a117-7791-479f-8477-46cd94afcc09" containerName="extract-content" Jan 21 
18:03:57 crc kubenswrapper[4792]: E0121 18:03:57.066606 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" containerName="extract-content" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.066617 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" containerName="extract-content" Jan 21 18:03:57 crc kubenswrapper[4792]: E0121 18:03:57.066630 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.066638 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 21 18:03:57 crc kubenswrapper[4792]: E0121 18:03:57.066647 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" containerName="registry-server" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.066869 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" containerName="registry-server" Jan 21 18:03:57 crc kubenswrapper[4792]: E0121 18:03:57.066879 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc09a117-7791-479f-8477-46cd94afcc09" containerName="registry-server" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.066886 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc09a117-7791-479f-8477-46cd94afcc09" containerName="registry-server" Jan 21 18:03:57 crc kubenswrapper[4792]: E0121 18:03:57.066898 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3afd98a4-e47a-4e22-ab23-0cbf1bf56e46" containerName="controller-manager" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.066905 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3afd98a4-e47a-4e22-ab23-0cbf1bf56e46" containerName="controller-manager" Jan 21 18:03:57 crc kubenswrapper[4792]: E0121 18:03:57.066918 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39822312-6707-4de1-8cc6-5ab1f513ebf7" containerName="route-controller-manager" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.066925 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="39822312-6707-4de1-8cc6-5ab1f513ebf7" containerName="route-controller-manager" Jan 21 18:03:57 crc kubenswrapper[4792]: E0121 18:03:57.066936 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc09a117-7791-479f-8477-46cd94afcc09" containerName="extract-utilities" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.066943 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc09a117-7791-479f-8477-46cd94afcc09" containerName="extract-utilities" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.067075 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3afd98a4-e47a-4e22-ab23-0cbf1bf56e46" containerName="controller-manager" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.067092 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="39822312-6707-4de1-8cc6-5ab1f513ebf7" containerName="route-controller-manager" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.067102 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="68bfd337-d05a-40cf-bcbb-6d4f44788aff" containerName="installer" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.067111 4792 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="cc09a117-7791-479f-8477-46cd94afcc09" containerName="registry-server" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.067119 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.067129 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" containerName="registry-server" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.067714 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.072032 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.072311 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.072663 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.073226 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.073906 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf"] Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.074344 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3afd98a4-e47a-4e22-ab23-0cbf1bf56e46" containerName="controller-manager" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.074911 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.080011 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6"] Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.080606 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.080670 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.080814 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.081735 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.081748 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.081748 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.082078 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.083636 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.085905 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.100416 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf"] Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.186062 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e1cd038-d467-424c-ab68-cff7c07c897e-serving-cert\") pod \"route-controller-manager-dc5d576d8-5pwhf\" (UID: \"3e1cd038-d467-424c-ab68-cff7c07c897e\") " pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.186159 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9wwf\" (UniqueName: \"kubernetes.io/projected/3687213f-bd4b-430f-8bd7-d345f4a467e0-kube-api-access-d9wwf\") pod \"controller-manager-8cd75dbb6-7n9b6\" (UID: \"3687213f-bd4b-430f-8bd7-d345f4a467e0\") " pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.186196 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssfnz\" (UniqueName: \"kubernetes.io/projected/3e1cd038-d467-424c-ab68-cff7c07c897e-kube-api-access-ssfnz\") pod \"route-controller-manager-dc5d576d8-5pwhf\" (UID: \"3e1cd038-d467-424c-ab68-cff7c07c897e\") " 
pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.186222 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3687213f-bd4b-430f-8bd7-d345f4a467e0-serving-cert\") pod \"controller-manager-8cd75dbb6-7n9b6\" (UID: \"3687213f-bd4b-430f-8bd7-d345f4a467e0\") " pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.186253 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3687213f-bd4b-430f-8bd7-d345f4a467e0-client-ca\") pod \"controller-manager-8cd75dbb6-7n9b6\" (UID: \"3687213f-bd4b-430f-8bd7-d345f4a467e0\") " pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.186280 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3e1cd038-d467-424c-ab68-cff7c07c897e-client-ca\") pod \"route-controller-manager-dc5d576d8-5pwhf\" (UID: \"3e1cd038-d467-424c-ab68-cff7c07c897e\") " pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.186336 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3687213f-bd4b-430f-8bd7-d345f4a467e0-proxy-ca-bundles\") pod \"controller-manager-8cd75dbb6-7n9b6\" (UID: \"3687213f-bd4b-430f-8bd7-d345f4a467e0\") " pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.186364 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e1cd038-d467-424c-ab68-cff7c07c897e-config\") pod \"route-controller-manager-dc5d576d8-5pwhf\" (UID: \"3e1cd038-d467-424c-ab68-cff7c07c897e\") " pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.186389 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3687213f-bd4b-430f-8bd7-d345f4a467e0-config\") pod \"controller-manager-8cd75dbb6-7n9b6\" (UID: \"3687213f-bd4b-430f-8bd7-d345f4a467e0\") " pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.288207 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9wwf\" (UniqueName: \"kubernetes.io/projected/3687213f-bd4b-430f-8bd7-d345f4a467e0-kube-api-access-d9wwf\") pod \"controller-manager-8cd75dbb6-7n9b6\" (UID: \"3687213f-bd4b-430f-8bd7-d345f4a467e0\") " pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.288263 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssfnz\" (UniqueName: \"kubernetes.io/projected/3e1cd038-d467-424c-ab68-cff7c07c897e-kube-api-access-ssfnz\") pod \"route-controller-manager-dc5d576d8-5pwhf\" (UID: \"3e1cd038-d467-424c-ab68-cff7c07c897e\") " 
pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.288294 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3687213f-bd4b-430f-8bd7-d345f4a467e0-serving-cert\") pod \"controller-manager-8cd75dbb6-7n9b6\" (UID: \"3687213f-bd4b-430f-8bd7-d345f4a467e0\") " pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.288334 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3687213f-bd4b-430f-8bd7-d345f4a467e0-client-ca\") pod \"controller-manager-8cd75dbb6-7n9b6\" (UID: \"3687213f-bd4b-430f-8bd7-d345f4a467e0\") " pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.288356 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3e1cd038-d467-424c-ab68-cff7c07c897e-client-ca\") pod \"route-controller-manager-dc5d576d8-5pwhf\" (UID: \"3e1cd038-d467-424c-ab68-cff7c07c897e\") " pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.288460 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3687213f-bd4b-430f-8bd7-d345f4a467e0-proxy-ca-bundles\") pod \"controller-manager-8cd75dbb6-7n9b6\" (UID: \"3687213f-bd4b-430f-8bd7-d345f4a467e0\") " pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.288484 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e1cd038-d467-424c-ab68-cff7c07c897e-config\") pod \"route-controller-manager-dc5d576d8-5pwhf\" (UID: \"3e1cd038-d467-424c-ab68-cff7c07c897e\") " pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.288517 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3687213f-bd4b-430f-8bd7-d345f4a467e0-config\") pod \"controller-manager-8cd75dbb6-7n9b6\" (UID: \"3687213f-bd4b-430f-8bd7-d345f4a467e0\") " pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.288585 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e1cd038-d467-424c-ab68-cff7c07c897e-serving-cert\") pod \"route-controller-manager-dc5d576d8-5pwhf\" (UID: \"3e1cd038-d467-424c-ab68-cff7c07c897e\") " pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.291778 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3e1cd038-d467-424c-ab68-cff7c07c897e-client-ca\") pod \"route-controller-manager-dc5d576d8-5pwhf\" (UID: \"3e1cd038-d467-424c-ab68-cff7c07c897e\") " pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.291794 4792 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3687213f-bd4b-430f-8bd7-d345f4a467e0-client-ca\") pod \"controller-manager-8cd75dbb6-7n9b6\" (UID: \"3687213f-bd4b-430f-8bd7-d345f4a467e0\") " pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.292827 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3687213f-bd4b-430f-8bd7-d345f4a467e0-config\") pod \"controller-manager-8cd75dbb6-7n9b6\" (UID: \"3687213f-bd4b-430f-8bd7-d345f4a467e0\") " pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.292870 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3687213f-bd4b-430f-8bd7-d345f4a467e0-proxy-ca-bundles\") pod \"controller-manager-8cd75dbb6-7n9b6\" (UID: \"3687213f-bd4b-430f-8bd7-d345f4a467e0\") " pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.295406 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3687213f-bd4b-430f-8bd7-d345f4a467e0-serving-cert\") pod \"controller-manager-8cd75dbb6-7n9b6\" (UID: \"3687213f-bd4b-430f-8bd7-d345f4a467e0\") " pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.296141 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e1cd038-d467-424c-ab68-cff7c07c897e-serving-cert\") pod \"route-controller-manager-dc5d576d8-5pwhf\" (UID: \"3e1cd038-d467-424c-ab68-cff7c07c897e\") " pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.301906 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e1cd038-d467-424c-ab68-cff7c07c897e-config\") pod \"route-controller-manager-dc5d576d8-5pwhf\" (UID: \"3e1cd038-d467-424c-ab68-cff7c07c897e\") " pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.315331 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9wwf\" (UniqueName: \"kubernetes.io/projected/3687213f-bd4b-430f-8bd7-d345f4a467e0-kube-api-access-d9wwf\") pod \"controller-manager-8cd75dbb6-7n9b6\" (UID: \"3687213f-bd4b-430f-8bd7-d345f4a467e0\") " pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.321461 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssfnz\" (UniqueName: \"kubernetes.io/projected/3e1cd038-d467-424c-ab68-cff7c07c897e-kube-api-access-ssfnz\") pod \"route-controller-manager-dc5d576d8-5pwhf\" (UID: \"3e1cd038-d467-424c-ab68-cff7c07c897e\") " pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.425606 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.439153 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.672696 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6"] Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.676647 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-drcjx" event={"ID":"cc09a117-7791-479f-8477-46cd94afcc09","Type":"ContainerDied","Data":"5b4bfb4d495bea0232909cfeeafb46cba51faa6f3aef967e20cc95fbbbae7999"} Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.676736 4792 scope.go:117] "RemoveContainer" containerID="228a9e14f5c9c643cf4e9ebb7486d726a2ed1621ea7cc2ae0c732f0fc5476bd2" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.676977 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-drcjx" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.691219 4792 generic.go:334] "Generic (PLEG): container finished" podID="c18e5f7c-5ebf-4166-82ab-c29a81232623" containerID="9386dce00c5baa1e699a4bf4e33c2aecdf1530639509a490228058a6f8dd8011" exitCode=0 Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.691293 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5bd5w" event={"ID":"c18e5f7c-5ebf-4166-82ab-c29a81232623","Type":"ContainerDied","Data":"9386dce00c5baa1e699a4bf4e33c2aecdf1530639509a490228058a6f8dd8011"} Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.691340 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5bd5w" event={"ID":"c18e5f7c-5ebf-4166-82ab-c29a81232623","Type":"ContainerDied","Data":"150a55bed7e9513c397ce4e671fabf25663c64e74ffaa84f229b17701da84c35"} Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.691447 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5bd5w" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.704102 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf"] Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.711090 4792 scope.go:117] "RemoveContainer" containerID="1a2d00816c8d9a596d8b64b1265476172ccac7f8d93040e81422922c3e3ccc82" Jan 21 18:03:57 crc kubenswrapper[4792]: W0121 18:03:57.717443 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3e1cd038_d467_424c_ab68_cff7c07c897e.slice/crio-b8bbc6fa2efbb8b44ea18389b035c6463f82596aa3bb7e2218f9938911b8cf1f WatchSource:0}: Error finding container b8bbc6fa2efbb8b44ea18389b035c6463f82596aa3bb7e2218f9938911b8cf1f: Status 404 returned error can't find the container with id b8bbc6fa2efbb8b44ea18389b035c6463f82596aa3bb7e2218f9938911b8cf1f Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.729259 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-drcjx"] Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.745432 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-drcjx"] Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.748167 4792 scope.go:117] "RemoveContainer" containerID="614c69936623a2134225c9bf6d9ad211ff1ac4a07ef6b62a3bd439f913dc0599" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.750964 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5bd5w"] Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.754954 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5bd5w"] Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.765345 4792 scope.go:117] "RemoveContainer" containerID="9386dce00c5baa1e699a4bf4e33c2aecdf1530639509a490228058a6f8dd8011" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.785496 4792 scope.go:117] "RemoveContainer" containerID="2328d494197e3a18e7e6f4ac107701095842e8b3739b9fcbdf8cb923618ce69a" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.822282 4792 scope.go:117] "RemoveContainer" containerID="93568df998a00dd4632eedcf41c80ce0c901479ee389023b0fd4338d31dc477d" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.848011 4792 scope.go:117] "RemoveContainer" containerID="9386dce00c5baa1e699a4bf4e33c2aecdf1530639509a490228058a6f8dd8011" Jan 21 18:03:57 crc kubenswrapper[4792]: E0121 18:03:57.849635 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9386dce00c5baa1e699a4bf4e33c2aecdf1530639509a490228058a6f8dd8011\": container with ID starting with 9386dce00c5baa1e699a4bf4e33c2aecdf1530639509a490228058a6f8dd8011 not found: ID does not exist" containerID="9386dce00c5baa1e699a4bf4e33c2aecdf1530639509a490228058a6f8dd8011" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.850641 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9386dce00c5baa1e699a4bf4e33c2aecdf1530639509a490228058a6f8dd8011"} err="failed to get container status \"9386dce00c5baa1e699a4bf4e33c2aecdf1530639509a490228058a6f8dd8011\": rpc error: code = NotFound desc = could not find container \"9386dce00c5baa1e699a4bf4e33c2aecdf1530639509a490228058a6f8dd8011\": container with ID starting with 
9386dce00c5baa1e699a4bf4e33c2aecdf1530639509a490228058a6f8dd8011 not found: ID does not exist" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.850700 4792 scope.go:117] "RemoveContainer" containerID="2328d494197e3a18e7e6f4ac107701095842e8b3739b9fcbdf8cb923618ce69a" Jan 21 18:03:57 crc kubenswrapper[4792]: E0121 18:03:57.851285 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2328d494197e3a18e7e6f4ac107701095842e8b3739b9fcbdf8cb923618ce69a\": container with ID starting with 2328d494197e3a18e7e6f4ac107701095842e8b3739b9fcbdf8cb923618ce69a not found: ID does not exist" containerID="2328d494197e3a18e7e6f4ac107701095842e8b3739b9fcbdf8cb923618ce69a" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.851336 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2328d494197e3a18e7e6f4ac107701095842e8b3739b9fcbdf8cb923618ce69a"} err="failed to get container status \"2328d494197e3a18e7e6f4ac107701095842e8b3739b9fcbdf8cb923618ce69a\": rpc error: code = NotFound desc = could not find container \"2328d494197e3a18e7e6f4ac107701095842e8b3739b9fcbdf8cb923618ce69a\": container with ID starting with 2328d494197e3a18e7e6f4ac107701095842e8b3739b9fcbdf8cb923618ce69a not found: ID does not exist" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.851368 4792 scope.go:117] "RemoveContainer" containerID="93568df998a00dd4632eedcf41c80ce0c901479ee389023b0fd4338d31dc477d" Jan 21 18:03:57 crc kubenswrapper[4792]: E0121 18:03:57.851976 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93568df998a00dd4632eedcf41c80ce0c901479ee389023b0fd4338d31dc477d\": container with ID starting with 93568df998a00dd4632eedcf41c80ce0c901479ee389023b0fd4338d31dc477d not found: ID does not exist" containerID="93568df998a00dd4632eedcf41c80ce0c901479ee389023b0fd4338d31dc477d" Jan 21 18:03:57 crc kubenswrapper[4792]: I0121 18:03:57.852034 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93568df998a00dd4632eedcf41c80ce0c901479ee389023b0fd4338d31dc477d"} err="failed to get container status \"93568df998a00dd4632eedcf41c80ce0c901479ee389023b0fd4338d31dc477d\": rpc error: code = NotFound desc = could not find container \"93568df998a00dd4632eedcf41c80ce0c901479ee389023b0fd4338d31dc477d\": container with ID starting with 93568df998a00dd4632eedcf41c80ce0c901479ee389023b0fd4338d31dc477d not found: ID does not exist" Jan 21 18:03:58 crc kubenswrapper[4792]: I0121 18:03:58.209537 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 21 18:03:58 crc kubenswrapper[4792]: I0121 18:03:58.261153 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c18e5f7c-5ebf-4166-82ab-c29a81232623" path="/var/lib/kubelet/pods/c18e5f7c-5ebf-4166-82ab-c29a81232623/volumes" Jan 21 18:03:58 crc kubenswrapper[4792]: I0121 18:03:58.262801 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc09a117-7791-479f-8477-46cd94afcc09" path="/var/lib/kubelet/pods/cc09a117-7791-479f-8477-46cd94afcc09/volumes" Jan 21 18:03:58 crc kubenswrapper[4792]: I0121 18:03:58.703269 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" 
event={"ID":"3687213f-bd4b-430f-8bd7-d345f4a467e0","Type":"ContainerStarted","Data":"967877bc333e1a226f9ebc2b2b1d099cb006692a35698224150b806eb59b184b"} Jan 21 18:03:58 crc kubenswrapper[4792]: I0121 18:03:58.703340 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" event={"ID":"3687213f-bd4b-430f-8bd7-d345f4a467e0","Type":"ContainerStarted","Data":"916628966ac39f526bf3ea3c2c3737245b970f726a285f6ed36857d0f7535045"} Jan 21 18:03:58 crc kubenswrapper[4792]: I0121 18:03:58.703639 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" Jan 21 18:03:58 crc kubenswrapper[4792]: I0121 18:03:58.705255 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" event={"ID":"3e1cd038-d467-424c-ab68-cff7c07c897e","Type":"ContainerStarted","Data":"e5572f6589a8db13d7a2c9f799781b861e9cd828cdbe578c8cf849fa2d7f63a9"} Jan 21 18:03:58 crc kubenswrapper[4792]: I0121 18:03:58.705339 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" event={"ID":"3e1cd038-d467-424c-ab68-cff7c07c897e","Type":"ContainerStarted","Data":"b8bbc6fa2efbb8b44ea18389b035c6463f82596aa3bb7e2218f9938911b8cf1f"} Jan 21 18:03:58 crc kubenswrapper[4792]: I0121 18:03:58.705987 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" Jan 21 18:03:58 crc kubenswrapper[4792]: I0121 18:03:58.709563 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" Jan 21 18:03:58 crc kubenswrapper[4792]: I0121 18:03:58.713195 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" Jan 21 18:03:58 crc kubenswrapper[4792]: I0121 18:03:58.726649 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" podStartSLOduration=3.726627085 podStartE2EDuration="3.726627085s" podCreationTimestamp="2026-01-21 18:03:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:03:58.724442215 +0000 UTC m=+472.706405391" watchObservedRunningTime="2026-01-21 18:03:58.726627085 +0000 UTC m=+472.708590271" Jan 21 18:03:58 crc kubenswrapper[4792]: I0121 18:03:58.747019 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" podStartSLOduration=3.746996487 podStartE2EDuration="3.746996487s" podCreationTimestamp="2026-01-21 18:03:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:03:58.744933151 +0000 UTC m=+472.726896357" watchObservedRunningTime="2026-01-21 18:03:58.746996487 +0000 UTC m=+472.728959673" Jan 21 18:03:58 crc kubenswrapper[4792]: I0121 18:03:58.928706 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kk56x"] Jan 21 18:03:58 crc kubenswrapper[4792]: I0121 18:03:58.929132 4792 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/redhat-marketplace-kk56x" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" containerName="registry-server" containerID="cri-o://f72d96ce9dcf5bf123676a4c6b3732f5f6477f12d10ae682c5d68460d4f7444c" gracePeriod=2 Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.131117 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2rld8"] Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.132030 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2rld8" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" containerName="registry-server" containerID="cri-o://a43cb8e48612c0400b3e8c544b205714aaaa46c2ca73d16ba49e38d53a256f0a" gracePeriod=2 Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.326203 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kk56x" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.485651 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2rld8" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.523313 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7dj48\" (UniqueName: \"kubernetes.io/projected/3ee97d71-6a45-4d65-9242-fe34a6d15f52-kube-api-access-7dj48\") pod \"3ee97d71-6a45-4d65-9242-fe34a6d15f52\" (UID: \"3ee97d71-6a45-4d65-9242-fe34a6d15f52\") " Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.523419 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ee97d71-6a45-4d65-9242-fe34a6d15f52-catalog-content\") pod \"3ee97d71-6a45-4d65-9242-fe34a6d15f52\" (UID: \"3ee97d71-6a45-4d65-9242-fe34a6d15f52\") " Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.523448 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ee97d71-6a45-4d65-9242-fe34a6d15f52-utilities\") pod \"3ee97d71-6a45-4d65-9242-fe34a6d15f52\" (UID: \"3ee97d71-6a45-4d65-9242-fe34a6d15f52\") " Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.525160 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ee97d71-6a45-4d65-9242-fe34a6d15f52-utilities" (OuterVolumeSpecName: "utilities") pod "3ee97d71-6a45-4d65-9242-fe34a6d15f52" (UID: "3ee97d71-6a45-4d65-9242-fe34a6d15f52"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.531451 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ee97d71-6a45-4d65-9242-fe34a6d15f52-kube-api-access-7dj48" (OuterVolumeSpecName: "kube-api-access-7dj48") pod "3ee97d71-6a45-4d65-9242-fe34a6d15f52" (UID: "3ee97d71-6a45-4d65-9242-fe34a6d15f52"). InnerVolumeSpecName "kube-api-access-7dj48". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.546637 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ee97d71-6a45-4d65-9242-fe34a6d15f52-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3ee97d71-6a45-4d65-9242-fe34a6d15f52" (UID: "3ee97d71-6a45-4d65-9242-fe34a6d15f52"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.625457 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93c3d3cc-1042-4c4d-bc8c-80360b6ca310-catalog-content\") pod \"93c3d3cc-1042-4c4d-bc8c-80360b6ca310\" (UID: \"93c3d3cc-1042-4c4d-bc8c-80360b6ca310\") " Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.626050 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tz58k\" (UniqueName: \"kubernetes.io/projected/93c3d3cc-1042-4c4d-bc8c-80360b6ca310-kube-api-access-tz58k\") pod \"93c3d3cc-1042-4c4d-bc8c-80360b6ca310\" (UID: \"93c3d3cc-1042-4c4d-bc8c-80360b6ca310\") " Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.626108 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93c3d3cc-1042-4c4d-bc8c-80360b6ca310-utilities\") pod \"93c3d3cc-1042-4c4d-bc8c-80360b6ca310\" (UID: \"93c3d3cc-1042-4c4d-bc8c-80360b6ca310\") " Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.626403 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7dj48\" (UniqueName: \"kubernetes.io/projected/3ee97d71-6a45-4d65-9242-fe34a6d15f52-kube-api-access-7dj48\") on node \"crc\" DevicePath \"\"" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.626416 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ee97d71-6a45-4d65-9242-fe34a6d15f52-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.626474 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ee97d71-6a45-4d65-9242-fe34a6d15f52-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.627327 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93c3d3cc-1042-4c4d-bc8c-80360b6ca310-utilities" (OuterVolumeSpecName: "utilities") pod "93c3d3cc-1042-4c4d-bc8c-80360b6ca310" (UID: "93c3d3cc-1042-4c4d-bc8c-80360b6ca310"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.629050 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93c3d3cc-1042-4c4d-bc8c-80360b6ca310-kube-api-access-tz58k" (OuterVolumeSpecName: "kube-api-access-tz58k") pod "93c3d3cc-1042-4c4d-bc8c-80360b6ca310" (UID: "93c3d3cc-1042-4c4d-bc8c-80360b6ca310"). InnerVolumeSpecName "kube-api-access-tz58k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.721066 4792 generic.go:334] "Generic (PLEG): container finished" podID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" containerID="f72d96ce9dcf5bf123676a4c6b3732f5f6477f12d10ae682c5d68460d4f7444c" exitCode=0 Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.721151 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kk56x" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.721148 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kk56x" event={"ID":"3ee97d71-6a45-4d65-9242-fe34a6d15f52","Type":"ContainerDied","Data":"f72d96ce9dcf5bf123676a4c6b3732f5f6477f12d10ae682c5d68460d4f7444c"} Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.722414 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kk56x" event={"ID":"3ee97d71-6a45-4d65-9242-fe34a6d15f52","Type":"ContainerDied","Data":"83cebf25ab16a1aa2458a9dfbe045994a1b9dc99576f14ae0daa2b78033e9683"} Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.722475 4792 scope.go:117] "RemoveContainer" containerID="f72d96ce9dcf5bf123676a4c6b3732f5f6477f12d10ae682c5d68460d4f7444c" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.724772 4792 generic.go:334] "Generic (PLEG): container finished" podID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" containerID="a43cb8e48612c0400b3e8c544b205714aaaa46c2ca73d16ba49e38d53a256f0a" exitCode=0 Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.725019 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2rld8" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.725004 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2rld8" event={"ID":"93c3d3cc-1042-4c4d-bc8c-80360b6ca310","Type":"ContainerDied","Data":"a43cb8e48612c0400b3e8c544b205714aaaa46c2ca73d16ba49e38d53a256f0a"} Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.725090 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2rld8" event={"ID":"93c3d3cc-1042-4c4d-bc8c-80360b6ca310","Type":"ContainerDied","Data":"e5e38f4e6779ed8fc453c5df95e39e3197c8180118b8c60e6a20d879051d2a51"} Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.727182 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tz58k\" (UniqueName: \"kubernetes.io/projected/93c3d3cc-1042-4c4d-bc8c-80360b6ca310-kube-api-access-tz58k\") on node \"crc\" DevicePath \"\"" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.727214 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93c3d3cc-1042-4c4d-bc8c-80360b6ca310-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.746697 4792 scope.go:117] "RemoveContainer" containerID="e74421d5a0dbe762e0b601480e8c027226aeb6d0f66ac076882c3fd43634061e" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.759809 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kk56x"] Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.765751 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kk56x"] Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.774103 4792 scope.go:117] "RemoveContainer" containerID="9807b73506a649c95ec88f5ac3552aa38a94217c527dc0b87efbf9b295417747" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.775340 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93c3d3cc-1042-4c4d-bc8c-80360b6ca310-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "93c3d3cc-1042-4c4d-bc8c-80360b6ca310" (UID: 
"93c3d3cc-1042-4c4d-bc8c-80360b6ca310"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.797351 4792 scope.go:117] "RemoveContainer" containerID="f72d96ce9dcf5bf123676a4c6b3732f5f6477f12d10ae682c5d68460d4f7444c" Jan 21 18:03:59 crc kubenswrapper[4792]: E0121 18:03:59.798631 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f72d96ce9dcf5bf123676a4c6b3732f5f6477f12d10ae682c5d68460d4f7444c\": container with ID starting with f72d96ce9dcf5bf123676a4c6b3732f5f6477f12d10ae682c5d68460d4f7444c not found: ID does not exist" containerID="f72d96ce9dcf5bf123676a4c6b3732f5f6477f12d10ae682c5d68460d4f7444c" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.798716 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f72d96ce9dcf5bf123676a4c6b3732f5f6477f12d10ae682c5d68460d4f7444c"} err="failed to get container status \"f72d96ce9dcf5bf123676a4c6b3732f5f6477f12d10ae682c5d68460d4f7444c\": rpc error: code = NotFound desc = could not find container \"f72d96ce9dcf5bf123676a4c6b3732f5f6477f12d10ae682c5d68460d4f7444c\": container with ID starting with f72d96ce9dcf5bf123676a4c6b3732f5f6477f12d10ae682c5d68460d4f7444c not found: ID does not exist" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.798802 4792 scope.go:117] "RemoveContainer" containerID="e74421d5a0dbe762e0b601480e8c027226aeb6d0f66ac076882c3fd43634061e" Jan 21 18:03:59 crc kubenswrapper[4792]: E0121 18:03:59.799407 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e74421d5a0dbe762e0b601480e8c027226aeb6d0f66ac076882c3fd43634061e\": container with ID starting with e74421d5a0dbe762e0b601480e8c027226aeb6d0f66ac076882c3fd43634061e not found: ID does not exist" containerID="e74421d5a0dbe762e0b601480e8c027226aeb6d0f66ac076882c3fd43634061e" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.799433 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e74421d5a0dbe762e0b601480e8c027226aeb6d0f66ac076882c3fd43634061e"} err="failed to get container status \"e74421d5a0dbe762e0b601480e8c027226aeb6d0f66ac076882c3fd43634061e\": rpc error: code = NotFound desc = could not find container \"e74421d5a0dbe762e0b601480e8c027226aeb6d0f66ac076882c3fd43634061e\": container with ID starting with e74421d5a0dbe762e0b601480e8c027226aeb6d0f66ac076882c3fd43634061e not found: ID does not exist" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.799447 4792 scope.go:117] "RemoveContainer" containerID="9807b73506a649c95ec88f5ac3552aa38a94217c527dc0b87efbf9b295417747" Jan 21 18:03:59 crc kubenswrapper[4792]: E0121 18:03:59.799756 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9807b73506a649c95ec88f5ac3552aa38a94217c527dc0b87efbf9b295417747\": container with ID starting with 9807b73506a649c95ec88f5ac3552aa38a94217c527dc0b87efbf9b295417747 not found: ID does not exist" containerID="9807b73506a649c95ec88f5ac3552aa38a94217c527dc0b87efbf9b295417747" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.799786 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9807b73506a649c95ec88f5ac3552aa38a94217c527dc0b87efbf9b295417747"} err="failed to get container status 
\"9807b73506a649c95ec88f5ac3552aa38a94217c527dc0b87efbf9b295417747\": rpc error: code = NotFound desc = could not find container \"9807b73506a649c95ec88f5ac3552aa38a94217c527dc0b87efbf9b295417747\": container with ID starting with 9807b73506a649c95ec88f5ac3552aa38a94217c527dc0b87efbf9b295417747 not found: ID does not exist" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.799818 4792 scope.go:117] "RemoveContainer" containerID="a43cb8e48612c0400b3e8c544b205714aaaa46c2ca73d16ba49e38d53a256f0a" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.819917 4792 scope.go:117] "RemoveContainer" containerID="ea5043f96a5c0f96bdd67734bfbf882cc1d36c8fec2436289e7c82d856509444" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.829168 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93c3d3cc-1042-4c4d-bc8c-80360b6ca310-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.846149 4792 scope.go:117] "RemoveContainer" containerID="fe2abba28c527719cfc0a40e70c888b18c80705ba94d43f76d263c2b974a234a" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.863807 4792 scope.go:117] "RemoveContainer" containerID="a43cb8e48612c0400b3e8c544b205714aaaa46c2ca73d16ba49e38d53a256f0a" Jan 21 18:03:59 crc kubenswrapper[4792]: E0121 18:03:59.864383 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a43cb8e48612c0400b3e8c544b205714aaaa46c2ca73d16ba49e38d53a256f0a\": container with ID starting with a43cb8e48612c0400b3e8c544b205714aaaa46c2ca73d16ba49e38d53a256f0a not found: ID does not exist" containerID="a43cb8e48612c0400b3e8c544b205714aaaa46c2ca73d16ba49e38d53a256f0a" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.864449 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a43cb8e48612c0400b3e8c544b205714aaaa46c2ca73d16ba49e38d53a256f0a"} err="failed to get container status \"a43cb8e48612c0400b3e8c544b205714aaaa46c2ca73d16ba49e38d53a256f0a\": rpc error: code = NotFound desc = could not find container \"a43cb8e48612c0400b3e8c544b205714aaaa46c2ca73d16ba49e38d53a256f0a\": container with ID starting with a43cb8e48612c0400b3e8c544b205714aaaa46c2ca73d16ba49e38d53a256f0a not found: ID does not exist" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.864495 4792 scope.go:117] "RemoveContainer" containerID="ea5043f96a5c0f96bdd67734bfbf882cc1d36c8fec2436289e7c82d856509444" Jan 21 18:03:59 crc kubenswrapper[4792]: E0121 18:03:59.864872 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea5043f96a5c0f96bdd67734bfbf882cc1d36c8fec2436289e7c82d856509444\": container with ID starting with ea5043f96a5c0f96bdd67734bfbf882cc1d36c8fec2436289e7c82d856509444 not found: ID does not exist" containerID="ea5043f96a5c0f96bdd67734bfbf882cc1d36c8fec2436289e7c82d856509444" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.864916 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea5043f96a5c0f96bdd67734bfbf882cc1d36c8fec2436289e7c82d856509444"} err="failed to get container status \"ea5043f96a5c0f96bdd67734bfbf882cc1d36c8fec2436289e7c82d856509444\": rpc error: code = NotFound desc = could not find container \"ea5043f96a5c0f96bdd67734bfbf882cc1d36c8fec2436289e7c82d856509444\": container with ID starting with 
ea5043f96a5c0f96bdd67734bfbf882cc1d36c8fec2436289e7c82d856509444 not found: ID does not exist" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.864949 4792 scope.go:117] "RemoveContainer" containerID="fe2abba28c527719cfc0a40e70c888b18c80705ba94d43f76d263c2b974a234a" Jan 21 18:03:59 crc kubenswrapper[4792]: E0121 18:03:59.865671 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe2abba28c527719cfc0a40e70c888b18c80705ba94d43f76d263c2b974a234a\": container with ID starting with fe2abba28c527719cfc0a40e70c888b18c80705ba94d43f76d263c2b974a234a not found: ID does not exist" containerID="fe2abba28c527719cfc0a40e70c888b18c80705ba94d43f76d263c2b974a234a" Jan 21 18:03:59 crc kubenswrapper[4792]: I0121 18:03:59.865727 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe2abba28c527719cfc0a40e70c888b18c80705ba94d43f76d263c2b974a234a"} err="failed to get container status \"fe2abba28c527719cfc0a40e70c888b18c80705ba94d43f76d263c2b974a234a\": rpc error: code = NotFound desc = could not find container \"fe2abba28c527719cfc0a40e70c888b18c80705ba94d43f76d263c2b974a234a\": container with ID starting with fe2abba28c527719cfc0a40e70c888b18c80705ba94d43f76d263c2b974a234a not found: ID does not exist" Jan 21 18:04:00 crc kubenswrapper[4792]: I0121 18:04:00.054988 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2rld8"] Jan 21 18:04:00 crc kubenswrapper[4792]: I0121 18:04:00.057945 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2rld8"] Jan 21 18:04:00 crc kubenswrapper[4792]: I0121 18:04:00.260073 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" path="/var/lib/kubelet/pods/3ee97d71-6a45-4d65-9242-fe34a6d15f52/volumes" Jan 21 18:04:00 crc kubenswrapper[4792]: I0121 18:04:00.260867 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" path="/var/lib/kubelet/pods/93c3d3cc-1042-4c4d-bc8c-80360b6ca310/volumes" Jan 21 18:04:08 crc kubenswrapper[4792]: I0121 18:04:08.995645 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 21 18:04:14 crc kubenswrapper[4792]: I0121 18:04:14.927105 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf"] Jan 21 18:04:14 crc kubenswrapper[4792]: I0121 18:04:14.928002 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" podUID="3e1cd038-d467-424c-ab68-cff7c07c897e" containerName="route-controller-manager" containerID="cri-o://e5572f6589a8db13d7a2c9f799781b861e9cd828cdbe578c8cf849fa2d7f63a9" gracePeriod=30 Jan 21 18:04:15 crc kubenswrapper[4792]: I0121 18:04:15.388192 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" Jan 21 18:04:15 crc kubenswrapper[4792]: I0121 18:04:15.588752 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3e1cd038-d467-424c-ab68-cff7c07c897e-client-ca\") pod \"3e1cd038-d467-424c-ab68-cff7c07c897e\" (UID: \"3e1cd038-d467-424c-ab68-cff7c07c897e\") " Jan 21 18:04:15 crc kubenswrapper[4792]: I0121 18:04:15.588921 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ssfnz\" (UniqueName: \"kubernetes.io/projected/3e1cd038-d467-424c-ab68-cff7c07c897e-kube-api-access-ssfnz\") pod \"3e1cd038-d467-424c-ab68-cff7c07c897e\" (UID: \"3e1cd038-d467-424c-ab68-cff7c07c897e\") " Jan 21 18:04:15 crc kubenswrapper[4792]: I0121 18:04:15.588977 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e1cd038-d467-424c-ab68-cff7c07c897e-config\") pod \"3e1cd038-d467-424c-ab68-cff7c07c897e\" (UID: \"3e1cd038-d467-424c-ab68-cff7c07c897e\") " Jan 21 18:04:15 crc kubenswrapper[4792]: I0121 18:04:15.589009 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e1cd038-d467-424c-ab68-cff7c07c897e-serving-cert\") pod \"3e1cd038-d467-424c-ab68-cff7c07c897e\" (UID: \"3e1cd038-d467-424c-ab68-cff7c07c897e\") " Jan 21 18:04:15 crc kubenswrapper[4792]: I0121 18:04:15.589891 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e1cd038-d467-424c-ab68-cff7c07c897e-client-ca" (OuterVolumeSpecName: "client-ca") pod "3e1cd038-d467-424c-ab68-cff7c07c897e" (UID: "3e1cd038-d467-424c-ab68-cff7c07c897e"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:04:15 crc kubenswrapper[4792]: I0121 18:04:15.590003 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e1cd038-d467-424c-ab68-cff7c07c897e-config" (OuterVolumeSpecName: "config") pod "3e1cd038-d467-424c-ab68-cff7c07c897e" (UID: "3e1cd038-d467-424c-ab68-cff7c07c897e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:04:15 crc kubenswrapper[4792]: I0121 18:04:15.590362 4792 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3e1cd038-d467-424c-ab68-cff7c07c897e-client-ca\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:15 crc kubenswrapper[4792]: I0121 18:04:15.590391 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e1cd038-d467-424c-ab68-cff7c07c897e-config\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:15 crc kubenswrapper[4792]: I0121 18:04:15.601919 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e1cd038-d467-424c-ab68-cff7c07c897e-kube-api-access-ssfnz" (OuterVolumeSpecName: "kube-api-access-ssfnz") pod "3e1cd038-d467-424c-ab68-cff7c07c897e" (UID: "3e1cd038-d467-424c-ab68-cff7c07c897e"). InnerVolumeSpecName "kube-api-access-ssfnz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:04:15 crc kubenswrapper[4792]: I0121 18:04:15.606362 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e1cd038-d467-424c-ab68-cff7c07c897e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "3e1cd038-d467-424c-ab68-cff7c07c897e" (UID: "3e1cd038-d467-424c-ab68-cff7c07c897e"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:04:15 crc kubenswrapper[4792]: I0121 18:04:15.692019 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ssfnz\" (UniqueName: \"kubernetes.io/projected/3e1cd038-d467-424c-ab68-cff7c07c897e-kube-api-access-ssfnz\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:15 crc kubenswrapper[4792]: I0121 18:04:15.692072 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e1cd038-d467-424c-ab68-cff7c07c897e-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:15 crc kubenswrapper[4792]: I0121 18:04:15.829873 4792 generic.go:334] "Generic (PLEG): container finished" podID="3e1cd038-d467-424c-ab68-cff7c07c897e" containerID="e5572f6589a8db13d7a2c9f799781b861e9cd828cdbe578c8cf849fa2d7f63a9" exitCode=0 Jan 21 18:04:15 crc kubenswrapper[4792]: I0121 18:04:15.829934 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" event={"ID":"3e1cd038-d467-424c-ab68-cff7c07c897e","Type":"ContainerDied","Data":"e5572f6589a8db13d7a2c9f799781b861e9cd828cdbe578c8cf849fa2d7f63a9"} Jan 21 18:04:15 crc kubenswrapper[4792]: I0121 18:04:15.829956 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" Jan 21 18:04:15 crc kubenswrapper[4792]: I0121 18:04:15.829971 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf" event={"ID":"3e1cd038-d467-424c-ab68-cff7c07c897e","Type":"ContainerDied","Data":"b8bbc6fa2efbb8b44ea18389b035c6463f82596aa3bb7e2218f9938911b8cf1f"} Jan 21 18:04:15 crc kubenswrapper[4792]: I0121 18:04:15.829992 4792 scope.go:117] "RemoveContainer" containerID="e5572f6589a8db13d7a2c9f799781b861e9cd828cdbe578c8cf849fa2d7f63a9" Jan 21 18:04:15 crc kubenswrapper[4792]: I0121 18:04:15.852153 4792 scope.go:117] "RemoveContainer" containerID="e5572f6589a8db13d7a2c9f799781b861e9cd828cdbe578c8cf849fa2d7f63a9" Jan 21 18:04:15 crc kubenswrapper[4792]: E0121 18:04:15.852764 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5572f6589a8db13d7a2c9f799781b861e9cd828cdbe578c8cf849fa2d7f63a9\": container with ID starting with e5572f6589a8db13d7a2c9f799781b861e9cd828cdbe578c8cf849fa2d7f63a9 not found: ID does not exist" containerID="e5572f6589a8db13d7a2c9f799781b861e9cd828cdbe578c8cf849fa2d7f63a9" Jan 21 18:04:15 crc kubenswrapper[4792]: I0121 18:04:15.852816 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5572f6589a8db13d7a2c9f799781b861e9cd828cdbe578c8cf849fa2d7f63a9"} err="failed to get container status \"e5572f6589a8db13d7a2c9f799781b861e9cd828cdbe578c8cf849fa2d7f63a9\": rpc error: code = NotFound desc = could not find container \"e5572f6589a8db13d7a2c9f799781b861e9cd828cdbe578c8cf849fa2d7f63a9\": container with ID starting with 
e5572f6589a8db13d7a2c9f799781b861e9cd828cdbe578c8cf849fa2d7f63a9 not found: ID does not exist" Jan 21 18:04:15 crc kubenswrapper[4792]: I0121 18:04:15.860776 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf"] Jan 21 18:04:15 crc kubenswrapper[4792]: I0121 18:04:15.864392 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-dc5d576d8-5pwhf"] Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.075235 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds"] Jan 21 18:04:16 crc kubenswrapper[4792]: E0121 18:04:16.075476 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e1cd038-d467-424c-ab68-cff7c07c897e" containerName="route-controller-manager" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.075493 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e1cd038-d467-424c-ab68-cff7c07c897e" containerName="route-controller-manager" Jan 21 18:04:16 crc kubenswrapper[4792]: E0121 18:04:16.075507 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" containerName="extract-utilities" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.075513 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" containerName="extract-utilities" Jan 21 18:04:16 crc kubenswrapper[4792]: E0121 18:04:16.075531 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" containerName="extract-content" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.075540 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" containerName="extract-content" Jan 21 18:04:16 crc kubenswrapper[4792]: E0121 18:04:16.075551 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" containerName="extract-content" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.075559 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" containerName="extract-content" Jan 21 18:04:16 crc kubenswrapper[4792]: E0121 18:04:16.075574 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" containerName="registry-server" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.075581 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" containerName="registry-server" Jan 21 18:04:16 crc kubenswrapper[4792]: E0121 18:04:16.075592 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" containerName="extract-utilities" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.075600 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" containerName="extract-utilities" Jan 21 18:04:16 crc kubenswrapper[4792]: E0121 18:04:16.075611 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" containerName="registry-server" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.075618 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" containerName="registry-server" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 
18:04:16.075735 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ee97d71-6a45-4d65-9242-fe34a6d15f52" containerName="registry-server" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.075749 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="93c3d3cc-1042-4c4d-bc8c-80360b6ca310" containerName="registry-server" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.075756 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e1cd038-d467-424c-ab68-cff7c07c897e" containerName="route-controller-manager" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.076308 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.079607 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.079718 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.079743 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.080005 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.082619 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.084381 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.098619 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e553386-8570-488a-89ff-0afd3560c0f9-config\") pod \"route-controller-manager-6696d76777-8bfds\" (UID: \"1e553386-8570-488a-89ff-0afd3560c0f9\") " pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.098699 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e553386-8570-488a-89ff-0afd3560c0f9-serving-cert\") pod \"route-controller-manager-6696d76777-8bfds\" (UID: \"1e553386-8570-488a-89ff-0afd3560c0f9\") " pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.098768 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1e553386-8570-488a-89ff-0afd3560c0f9-client-ca\") pod \"route-controller-manager-6696d76777-8bfds\" (UID: \"1e553386-8570-488a-89ff-0afd3560c0f9\") " pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.098792 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8k687\" (UniqueName: 
\"kubernetes.io/projected/1e553386-8570-488a-89ff-0afd3560c0f9-kube-api-access-8k687\") pod \"route-controller-manager-6696d76777-8bfds\" (UID: \"1e553386-8570-488a-89ff-0afd3560c0f9\") " pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.102727 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds"] Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.200046 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1e553386-8570-488a-89ff-0afd3560c0f9-client-ca\") pod \"route-controller-manager-6696d76777-8bfds\" (UID: \"1e553386-8570-488a-89ff-0afd3560c0f9\") " pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.200119 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8k687\" (UniqueName: \"kubernetes.io/projected/1e553386-8570-488a-89ff-0afd3560c0f9-kube-api-access-8k687\") pod \"route-controller-manager-6696d76777-8bfds\" (UID: \"1e553386-8570-488a-89ff-0afd3560c0f9\") " pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.200194 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e553386-8570-488a-89ff-0afd3560c0f9-config\") pod \"route-controller-manager-6696d76777-8bfds\" (UID: \"1e553386-8570-488a-89ff-0afd3560c0f9\") " pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.200233 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e553386-8570-488a-89ff-0afd3560c0f9-serving-cert\") pod \"route-controller-manager-6696d76777-8bfds\" (UID: \"1e553386-8570-488a-89ff-0afd3560c0f9\") " pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.202108 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1e553386-8570-488a-89ff-0afd3560c0f9-client-ca\") pod \"route-controller-manager-6696d76777-8bfds\" (UID: \"1e553386-8570-488a-89ff-0afd3560c0f9\") " pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.202181 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e553386-8570-488a-89ff-0afd3560c0f9-config\") pod \"route-controller-manager-6696d76777-8bfds\" (UID: \"1e553386-8570-488a-89ff-0afd3560c0f9\") " pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.205518 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e553386-8570-488a-89ff-0afd3560c0f9-serving-cert\") pod \"route-controller-manager-6696d76777-8bfds\" (UID: \"1e553386-8570-488a-89ff-0afd3560c0f9\") " pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.226347 4792 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8k687\" (UniqueName: \"kubernetes.io/projected/1e553386-8570-488a-89ff-0afd3560c0f9-kube-api-access-8k687\") pod \"route-controller-manager-6696d76777-8bfds\" (UID: \"1e553386-8570-488a-89ff-0afd3560c0f9\") " pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.253606 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e1cd038-d467-424c-ab68-cff7c07c897e" path="/var/lib/kubelet/pods/3e1cd038-d467-424c-ab68-cff7c07c897e/volumes" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.403932 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.594137 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds"] Jan 21 18:04:16 crc kubenswrapper[4792]: W0121 18:04:16.601364 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1e553386_8570_488a_89ff_0afd3560c0f9.slice/crio-a72dec80a13be8d93e34c591867e154a1c7005bf4535199eefc6b3892846f5a1 WatchSource:0}: Error finding container a72dec80a13be8d93e34c591867e154a1c7005bf4535199eefc6b3892846f5a1: Status 404 returned error can't find the container with id a72dec80a13be8d93e34c591867e154a1c7005bf4535199eefc6b3892846f5a1 Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.838785 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" event={"ID":"1e553386-8570-488a-89ff-0afd3560c0f9","Type":"ContainerStarted","Data":"2690804ddb6072d6c02889d33373a642955f299f525a07252d2ab40e9db00019"} Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.838881 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" event={"ID":"1e553386-8570-488a-89ff-0afd3560c0f9","Type":"ContainerStarted","Data":"a72dec80a13be8d93e34c591867e154a1c7005bf4535199eefc6b3892846f5a1"} Jan 21 18:04:16 crc kubenswrapper[4792]: I0121 18:04:16.840548 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" Jan 21 18:04:17 crc kubenswrapper[4792]: I0121 18:04:17.095332 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" Jan 21 18:04:17 crc kubenswrapper[4792]: I0121 18:04:17.121809 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" podStartSLOduration=3.121771382 podStartE2EDuration="3.121771382s" podCreationTimestamp="2026-01-21 18:04:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:04:16.86005133 +0000 UTC m=+490.842014516" watchObservedRunningTime="2026-01-21 18:04:17.121771382 +0000 UTC m=+491.103734578" Jan 21 18:04:41 crc kubenswrapper[4792]: I0121 18:04:41.964332 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-x48g6"] Jan 21 18:04:41 crc 
kubenswrapper[4792]: I0121 18:04:41.966012 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:41 crc kubenswrapper[4792]: I0121 18:04:41.981345 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-x48g6"] Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.107228 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7e9113d4-81a4-4206-8a0a-1201fa0dab29-ca-trust-extracted\") pod \"image-registry-66df7c8f76-x48g6\" (UID: \"7e9113d4-81a4-4206-8a0a-1201fa0dab29\") " pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.107300 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7e9113d4-81a4-4206-8a0a-1201fa0dab29-registry-certificates\") pod \"image-registry-66df7c8f76-x48g6\" (UID: \"7e9113d4-81a4-4206-8a0a-1201fa0dab29\") " pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.107452 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7e9113d4-81a4-4206-8a0a-1201fa0dab29-installation-pull-secrets\") pod \"image-registry-66df7c8f76-x48g6\" (UID: \"7e9113d4-81a4-4206-8a0a-1201fa0dab29\") " pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.107533 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7e9113d4-81a4-4206-8a0a-1201fa0dab29-registry-tls\") pod \"image-registry-66df7c8f76-x48g6\" (UID: \"7e9113d4-81a4-4206-8a0a-1201fa0dab29\") " pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.107613 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7e9113d4-81a4-4206-8a0a-1201fa0dab29-trusted-ca\") pod \"image-registry-66df7c8f76-x48g6\" (UID: \"7e9113d4-81a4-4206-8a0a-1201fa0dab29\") " pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.107704 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7e9113d4-81a4-4206-8a0a-1201fa0dab29-bound-sa-token\") pod \"image-registry-66df7c8f76-x48g6\" (UID: \"7e9113d4-81a4-4206-8a0a-1201fa0dab29\") " pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.107753 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gglbs\" (UniqueName: \"kubernetes.io/projected/7e9113d4-81a4-4206-8a0a-1201fa0dab29-kube-api-access-gglbs\") pod \"image-registry-66df7c8f76-x48g6\" (UID: \"7e9113d4-81a4-4206-8a0a-1201fa0dab29\") " pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.107793 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-x48g6\" (UID: \"7e9113d4-81a4-4206-8a0a-1201fa0dab29\") " pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.130962 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-x48g6\" (UID: \"7e9113d4-81a4-4206-8a0a-1201fa0dab29\") " pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.209064 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7e9113d4-81a4-4206-8a0a-1201fa0dab29-registry-certificates\") pod \"image-registry-66df7c8f76-x48g6\" (UID: \"7e9113d4-81a4-4206-8a0a-1201fa0dab29\") " pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.209130 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7e9113d4-81a4-4206-8a0a-1201fa0dab29-installation-pull-secrets\") pod \"image-registry-66df7c8f76-x48g6\" (UID: \"7e9113d4-81a4-4206-8a0a-1201fa0dab29\") " pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.209157 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7e9113d4-81a4-4206-8a0a-1201fa0dab29-registry-tls\") pod \"image-registry-66df7c8f76-x48g6\" (UID: \"7e9113d4-81a4-4206-8a0a-1201fa0dab29\") " pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.209194 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7e9113d4-81a4-4206-8a0a-1201fa0dab29-trusted-ca\") pod \"image-registry-66df7c8f76-x48g6\" (UID: \"7e9113d4-81a4-4206-8a0a-1201fa0dab29\") " pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.209225 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7e9113d4-81a4-4206-8a0a-1201fa0dab29-bound-sa-token\") pod \"image-registry-66df7c8f76-x48g6\" (UID: \"7e9113d4-81a4-4206-8a0a-1201fa0dab29\") " pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.209248 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gglbs\" (UniqueName: \"kubernetes.io/projected/7e9113d4-81a4-4206-8a0a-1201fa0dab29-kube-api-access-gglbs\") pod \"image-registry-66df7c8f76-x48g6\" (UID: \"7e9113d4-81a4-4206-8a0a-1201fa0dab29\") " pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.209294 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7e9113d4-81a4-4206-8a0a-1201fa0dab29-ca-trust-extracted\") pod \"image-registry-66df7c8f76-x48g6\" 
(UID: \"7e9113d4-81a4-4206-8a0a-1201fa0dab29\") " pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.210030 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7e9113d4-81a4-4206-8a0a-1201fa0dab29-ca-trust-extracted\") pod \"image-registry-66df7c8f76-x48g6\" (UID: \"7e9113d4-81a4-4206-8a0a-1201fa0dab29\") " pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.210971 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7e9113d4-81a4-4206-8a0a-1201fa0dab29-registry-certificates\") pod \"image-registry-66df7c8f76-x48g6\" (UID: \"7e9113d4-81a4-4206-8a0a-1201fa0dab29\") " pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.211228 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7e9113d4-81a4-4206-8a0a-1201fa0dab29-trusted-ca\") pod \"image-registry-66df7c8f76-x48g6\" (UID: \"7e9113d4-81a4-4206-8a0a-1201fa0dab29\") " pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.219554 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7e9113d4-81a4-4206-8a0a-1201fa0dab29-installation-pull-secrets\") pod \"image-registry-66df7c8f76-x48g6\" (UID: \"7e9113d4-81a4-4206-8a0a-1201fa0dab29\") " pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.219585 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7e9113d4-81a4-4206-8a0a-1201fa0dab29-registry-tls\") pod \"image-registry-66df7c8f76-x48g6\" (UID: \"7e9113d4-81a4-4206-8a0a-1201fa0dab29\") " pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.226493 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7e9113d4-81a4-4206-8a0a-1201fa0dab29-bound-sa-token\") pod \"image-registry-66df7c8f76-x48g6\" (UID: \"7e9113d4-81a4-4206-8a0a-1201fa0dab29\") " pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.226658 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gglbs\" (UniqueName: \"kubernetes.io/projected/7e9113d4-81a4-4206-8a0a-1201fa0dab29-kube-api-access-gglbs\") pod \"image-registry-66df7c8f76-x48g6\" (UID: \"7e9113d4-81a4-4206-8a0a-1201fa0dab29\") " pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.286904 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:42 crc kubenswrapper[4792]: I0121 18:04:42.513453 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-x48g6"] Jan 21 18:04:43 crc kubenswrapper[4792]: I0121 18:04:43.005530 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" event={"ID":"7e9113d4-81a4-4206-8a0a-1201fa0dab29","Type":"ContainerStarted","Data":"02e4047aa22e41dac314fbc4c41b5c48341662a28b006379462a7952d9c59d28"} Jan 21 18:04:43 crc kubenswrapper[4792]: I0121 18:04:43.005968 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" event={"ID":"7e9113d4-81a4-4206-8a0a-1201fa0dab29","Type":"ContainerStarted","Data":"4cc7eebd52067c60239e56ae2d26ae495dbd2715ddf358f9f18788493c5e7696"} Jan 21 18:04:43 crc kubenswrapper[4792]: I0121 18:04:43.006346 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:04:43 crc kubenswrapper[4792]: I0121 18:04:43.033879 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" podStartSLOduration=2.033837322 podStartE2EDuration="2.033837322s" podCreationTimestamp="2026-01-21 18:04:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:04:43.029064292 +0000 UTC m=+517.011027478" watchObservedRunningTime="2026-01-21 18:04:43.033837322 +0000 UTC m=+517.015800528" Jan 21 18:04:45 crc kubenswrapper[4792]: I0121 18:04:45.563656 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4kppq"] Jan 21 18:04:45 crc kubenswrapper[4792]: I0121 18:04:45.566350 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4kppq" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" containerName="registry-server" containerID="cri-o://cd8f3aaef77ed4b850c80cc49480c0b2dc10447b8a8a33768ef7e10fa9b386c4" gracePeriod=30 Jan 21 18:04:45 crc kubenswrapper[4792]: I0121 18:04:45.570794 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c575g"] Jan 21 18:04:45 crc kubenswrapper[4792]: I0121 18:04:45.571162 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-c575g" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" containerName="registry-server" containerID="cri-o://b6584bed35fdc8e912f8a486a70ff330f77dd4411792d788dff8f09e34010af9" gracePeriod=30 Jan 21 18:04:45 crc kubenswrapper[4792]: I0121 18:04:45.583009 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bdt7b"] Jan 21 18:04:45 crc kubenswrapper[4792]: I0121 18:04:45.583708 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" podUID="a15d3491-6301-49f2-a196-df5db956aa82" containerName="marketplace-operator" containerID="cri-o://e8defabfb923c96a2ce9b08b239072c43fa7acc56cf255a6348ada3d81de6e1c" gracePeriod=30 Jan 21 18:04:45 crc kubenswrapper[4792]: I0121 18:04:45.590539 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l85mb"] Jan 
21 18:04:45 crc kubenswrapper[4792]: I0121 18:04:45.603738 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-d4jqh"] Jan 21 18:04:45 crc kubenswrapper[4792]: I0121 18:04:45.604078 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-d4jqh" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" containerName="registry-server" containerID="cri-o://7be40c72c549b12d3b6ea6b84125bb04e6a21170fbc5bc2cd26f187c872e2e75" gracePeriod=30 Jan 21 18:04:45 crc kubenswrapper[4792]: I0121 18:04:45.613263 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nmngq"] Jan 21 18:04:45 crc kubenswrapper[4792]: I0121 18:04:45.614284 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nmngq" Jan 21 18:04:45 crc kubenswrapper[4792]: I0121 18:04:45.637358 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nmngq"] Jan 21 18:04:45 crc kubenswrapper[4792]: I0121 18:04:45.763694 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-697x2\" (UniqueName: \"kubernetes.io/projected/d21d6d31-01f6-40c1-817b-cc87735ae2f6-kube-api-access-697x2\") pod \"marketplace-operator-79b997595-nmngq\" (UID: \"d21d6d31-01f6-40c1-817b-cc87735ae2f6\") " pod="openshift-marketplace/marketplace-operator-79b997595-nmngq" Jan 21 18:04:45 crc kubenswrapper[4792]: I0121 18:04:45.763830 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d21d6d31-01f6-40c1-817b-cc87735ae2f6-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nmngq\" (UID: \"d21d6d31-01f6-40c1-817b-cc87735ae2f6\") " pod="openshift-marketplace/marketplace-operator-79b997595-nmngq" Jan 21 18:04:45 crc kubenswrapper[4792]: I0121 18:04:45.763957 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d21d6d31-01f6-40c1-817b-cc87735ae2f6-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nmngq\" (UID: \"d21d6d31-01f6-40c1-817b-cc87735ae2f6\") " pod="openshift-marketplace/marketplace-operator-79b997595-nmngq" Jan 21 18:04:45 crc kubenswrapper[4792]: I0121 18:04:45.865999 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d21d6d31-01f6-40c1-817b-cc87735ae2f6-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nmngq\" (UID: \"d21d6d31-01f6-40c1-817b-cc87735ae2f6\") " pod="openshift-marketplace/marketplace-operator-79b997595-nmngq" Jan 21 18:04:45 crc kubenswrapper[4792]: I0121 18:04:45.866556 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-697x2\" (UniqueName: \"kubernetes.io/projected/d21d6d31-01f6-40c1-817b-cc87735ae2f6-kube-api-access-697x2\") pod \"marketplace-operator-79b997595-nmngq\" (UID: \"d21d6d31-01f6-40c1-817b-cc87735ae2f6\") " pod="openshift-marketplace/marketplace-operator-79b997595-nmngq" Jan 21 18:04:45 crc kubenswrapper[4792]: I0121 18:04:45.866631 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/d21d6d31-01f6-40c1-817b-cc87735ae2f6-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nmngq\" (UID: \"d21d6d31-01f6-40c1-817b-cc87735ae2f6\") " pod="openshift-marketplace/marketplace-operator-79b997595-nmngq" Jan 21 18:04:45 crc kubenswrapper[4792]: I0121 18:04:45.867671 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d21d6d31-01f6-40c1-817b-cc87735ae2f6-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nmngq\" (UID: \"d21d6d31-01f6-40c1-817b-cc87735ae2f6\") " pod="openshift-marketplace/marketplace-operator-79b997595-nmngq" Jan 21 18:04:45 crc kubenswrapper[4792]: I0121 18:04:45.876279 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d21d6d31-01f6-40c1-817b-cc87735ae2f6-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nmngq\" (UID: \"d21d6d31-01f6-40c1-817b-cc87735ae2f6\") " pod="openshift-marketplace/marketplace-operator-79b997595-nmngq" Jan 21 18:04:45 crc kubenswrapper[4792]: I0121 18:04:45.892455 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-697x2\" (UniqueName: \"kubernetes.io/projected/d21d6d31-01f6-40c1-817b-cc87735ae2f6-kube-api-access-697x2\") pod \"marketplace-operator-79b997595-nmngq\" (UID: \"d21d6d31-01f6-40c1-817b-cc87735ae2f6\") " pod="openshift-marketplace/marketplace-operator-79b997595-nmngq" Jan 21 18:04:45 crc kubenswrapper[4792]: I0121 18:04:45.948349 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nmngq" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.044172 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-bdt7b_a15d3491-6301-49f2-a196-df5db956aa82/marketplace-operator/3.log" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.044268 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.058831 4792 generic.go:334] "Generic (PLEG): container finished" podID="106746c3-4570-4081-90ec-a6f1afd6dade" containerID="7be40c72c549b12d3b6ea6b84125bb04e6a21170fbc5bc2cd26f187c872e2e75" exitCode=0 Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.059135 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d4jqh" event={"ID":"106746c3-4570-4081-90ec-a6f1afd6dade","Type":"ContainerDied","Data":"7be40c72c549b12d3b6ea6b84125bb04e6a21170fbc5bc2cd26f187c872e2e75"} Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.087065 4792 generic.go:334] "Generic (PLEG): container finished" podID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" containerID="b6584bed35fdc8e912f8a486a70ff330f77dd4411792d788dff8f09e34010af9" exitCode=0 Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.087260 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c575g" event={"ID":"1018828e-7a21-4a36-83a9-c87d6aaa38c3","Type":"ContainerDied","Data":"b6584bed35fdc8e912f8a486a70ff330f77dd4411792d788dff8f09e34010af9"} Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.087310 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c575g" event={"ID":"1018828e-7a21-4a36-83a9-c87d6aaa38c3","Type":"ContainerDied","Data":"d3a688dfcd5c3e37f59d32dc48a985ae5a757c5d4b661d68fc3e1f36ac114950"} Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.087323 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d3a688dfcd5c3e37f59d32dc48a985ae5a757c5d4b661d68fc3e1f36ac114950" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.095788 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4kppq" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.109497 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-bdt7b_a15d3491-6301-49f2-a196-df5db956aa82/marketplace-operator/3.log" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.109575 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c575g" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.109606 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" event={"ID":"a15d3491-6301-49f2-a196-df5db956aa82","Type":"ContainerDied","Data":"e8defabfb923c96a2ce9b08b239072c43fa7acc56cf255a6348ada3d81de6e1c"} Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.109659 4792 scope.go:117] "RemoveContainer" containerID="e8defabfb923c96a2ce9b08b239072c43fa7acc56cf255a6348ada3d81de6e1c" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.109574 4792 generic.go:334] "Generic (PLEG): container finished" podID="a15d3491-6301-49f2-a196-df5db956aa82" containerID="e8defabfb923c96a2ce9b08b239072c43fa7acc56cf255a6348ada3d81de6e1c" exitCode=0 Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.110011 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.110058 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bdt7b" event={"ID":"a15d3491-6301-49f2-a196-df5db956aa82","Type":"ContainerDied","Data":"3f6770720a48e2918b1cc3bbf02c0f2cb9eadce225da564cf677e9e6ca443588"} Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.127173 4792 generic.go:334] "Generic (PLEG): container finished" podID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" containerID="cd8f3aaef77ed4b850c80cc49480c0b2dc10447b8a8a33768ef7e10fa9b386c4" exitCode=0 Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.127379 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4kppq" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.127680 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4kppq" event={"ID":"bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd","Type":"ContainerDied","Data":"cd8f3aaef77ed4b850c80cc49480c0b2dc10447b8a8a33768ef7e10fa9b386c4"} Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.127712 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4kppq" event={"ID":"bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd","Type":"ContainerDied","Data":"f46744ef1a6b65e9000d0860e4e4370e7eb4cc519ea49e32cca967544d6298af"} Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.127430 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-l85mb" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" containerName="registry-server" containerID="cri-o://422782ed0f6c84054d8de9616394a89530aa8eb556a2135c8e4e3ba7bdccd475" gracePeriod=30 Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.166985 4792 scope.go:117] "RemoveContainer" containerID="827aeb26800964cccb2fdd5cd9e5498dd910f8ddadacfcd890a5882ca7bba6e2" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.174249 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd-catalog-content\") pod \"bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd\" (UID: \"bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd\") " Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.174306 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zqtg2\" (UniqueName: \"kubernetes.io/projected/bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd-kube-api-access-zqtg2\") pod \"bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd\" (UID: \"bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd\") " Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.174349 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a15d3491-6301-49f2-a196-df5db956aa82-marketplace-trusted-ca\") pod \"a15d3491-6301-49f2-a196-df5db956aa82\" (UID: \"a15d3491-6301-49f2-a196-df5db956aa82\") " Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.174433 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd-utilities\") pod \"bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd\" (UID: \"bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd\") " Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 
18:04:46.174472 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5hbkp\" (UniqueName: \"kubernetes.io/projected/a15d3491-6301-49f2-a196-df5db956aa82-kube-api-access-5hbkp\") pod \"a15d3491-6301-49f2-a196-df5db956aa82\" (UID: \"a15d3491-6301-49f2-a196-df5db956aa82\") " Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.174494 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a15d3491-6301-49f2-a196-df5db956aa82-marketplace-operator-metrics\") pod \"a15d3491-6301-49f2-a196-df5db956aa82\" (UID: \"a15d3491-6301-49f2-a196-df5db956aa82\") " Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.176003 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a15d3491-6301-49f2-a196-df5db956aa82-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "a15d3491-6301-49f2-a196-df5db956aa82" (UID: "a15d3491-6301-49f2-a196-df5db956aa82"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.178081 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd-utilities" (OuterVolumeSpecName: "utilities") pod "bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" (UID: "bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.185110 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a15d3491-6301-49f2-a196-df5db956aa82-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "a15d3491-6301-49f2-a196-df5db956aa82" (UID: "a15d3491-6301-49f2-a196-df5db956aa82"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.185359 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a15d3491-6301-49f2-a196-df5db956aa82-kube-api-access-5hbkp" (OuterVolumeSpecName: "kube-api-access-5hbkp") pod "a15d3491-6301-49f2-a196-df5db956aa82" (UID: "a15d3491-6301-49f2-a196-df5db956aa82"). InnerVolumeSpecName "kube-api-access-5hbkp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.192773 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd-kube-api-access-zqtg2" (OuterVolumeSpecName: "kube-api-access-zqtg2") pod "bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" (UID: "bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd"). InnerVolumeSpecName "kube-api-access-zqtg2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.194745 4792 scope.go:117] "RemoveContainer" containerID="e8defabfb923c96a2ce9b08b239072c43fa7acc56cf255a6348ada3d81de6e1c" Jan 21 18:04:46 crc kubenswrapper[4792]: E0121 18:04:46.195312 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8defabfb923c96a2ce9b08b239072c43fa7acc56cf255a6348ada3d81de6e1c\": container with ID starting with e8defabfb923c96a2ce9b08b239072c43fa7acc56cf255a6348ada3d81de6e1c not found: ID does not exist" containerID="e8defabfb923c96a2ce9b08b239072c43fa7acc56cf255a6348ada3d81de6e1c" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.195359 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8defabfb923c96a2ce9b08b239072c43fa7acc56cf255a6348ada3d81de6e1c"} err="failed to get container status \"e8defabfb923c96a2ce9b08b239072c43fa7acc56cf255a6348ada3d81de6e1c\": rpc error: code = NotFound desc = could not find container \"e8defabfb923c96a2ce9b08b239072c43fa7acc56cf255a6348ada3d81de6e1c\": container with ID starting with e8defabfb923c96a2ce9b08b239072c43fa7acc56cf255a6348ada3d81de6e1c not found: ID does not exist" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.195438 4792 scope.go:117] "RemoveContainer" containerID="827aeb26800964cccb2fdd5cd9e5498dd910f8ddadacfcd890a5882ca7bba6e2" Jan 21 18:04:46 crc kubenswrapper[4792]: E0121 18:04:46.195898 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"827aeb26800964cccb2fdd5cd9e5498dd910f8ddadacfcd890a5882ca7bba6e2\": container with ID starting with 827aeb26800964cccb2fdd5cd9e5498dd910f8ddadacfcd890a5882ca7bba6e2 not found: ID does not exist" containerID="827aeb26800964cccb2fdd5cd9e5498dd910f8ddadacfcd890a5882ca7bba6e2" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.195934 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"827aeb26800964cccb2fdd5cd9e5498dd910f8ddadacfcd890a5882ca7bba6e2"} err="failed to get container status \"827aeb26800964cccb2fdd5cd9e5498dd910f8ddadacfcd890a5882ca7bba6e2\": rpc error: code = NotFound desc = could not find container \"827aeb26800964cccb2fdd5cd9e5498dd910f8ddadacfcd890a5882ca7bba6e2\": container with ID starting with 827aeb26800964cccb2fdd5cd9e5498dd910f8ddadacfcd890a5882ca7bba6e2 not found: ID does not exist" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.195981 4792 scope.go:117] "RemoveContainer" containerID="cd8f3aaef77ed4b850c80cc49480c0b2dc10447b8a8a33768ef7e10fa9b386c4" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.216936 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d4jqh" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.226102 4792 scope.go:117] "RemoveContainer" containerID="3c6a19b2e18dd1c62329861ffb1df28161f3ff4bf8b723838a677fd954974096" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.229299 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" (UID: "bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.260126 4792 scope.go:117] "RemoveContainer" containerID="056c40bb94649ec92bda3b3854a66c3f5412361dad988399fac837071ad8262f" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.278756 4792 scope.go:117] "RemoveContainer" containerID="cd8f3aaef77ed4b850c80cc49480c0b2dc10447b8a8a33768ef7e10fa9b386c4" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.279346 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1018828e-7a21-4a36-83a9-c87d6aaa38c3-utilities\") pod \"1018828e-7a21-4a36-83a9-c87d6aaa38c3\" (UID: \"1018828e-7a21-4a36-83a9-c87d6aaa38c3\") " Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.279545 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1018828e-7a21-4a36-83a9-c87d6aaa38c3-catalog-content\") pod \"1018828e-7a21-4a36-83a9-c87d6aaa38c3\" (UID: \"1018828e-7a21-4a36-83a9-c87d6aaa38c3\") " Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.279613 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkmrw\" (UniqueName: \"kubernetes.io/projected/1018828e-7a21-4a36-83a9-c87d6aaa38c3-kube-api-access-zkmrw\") pod \"1018828e-7a21-4a36-83a9-c87d6aaa38c3\" (UID: \"1018828e-7a21-4a36-83a9-c87d6aaa38c3\") " Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.280595 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.280616 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5hbkp\" (UniqueName: \"kubernetes.io/projected/a15d3491-6301-49f2-a196-df5db956aa82-kube-api-access-5hbkp\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.280628 4792 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a15d3491-6301-49f2-a196-df5db956aa82-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.280637 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.280750 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zqtg2\" (UniqueName: \"kubernetes.io/projected/bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd-kube-api-access-zqtg2\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.280763 4792 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a15d3491-6301-49f2-a196-df5db956aa82-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:46 crc kubenswrapper[4792]: E0121 18:04:46.280779 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd8f3aaef77ed4b850c80cc49480c0b2dc10447b8a8a33768ef7e10fa9b386c4\": container with ID starting with cd8f3aaef77ed4b850c80cc49480c0b2dc10447b8a8a33768ef7e10fa9b386c4 not found: ID does not exist" 
containerID="cd8f3aaef77ed4b850c80cc49480c0b2dc10447b8a8a33768ef7e10fa9b386c4" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.280823 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd8f3aaef77ed4b850c80cc49480c0b2dc10447b8a8a33768ef7e10fa9b386c4"} err="failed to get container status \"cd8f3aaef77ed4b850c80cc49480c0b2dc10447b8a8a33768ef7e10fa9b386c4\": rpc error: code = NotFound desc = could not find container \"cd8f3aaef77ed4b850c80cc49480c0b2dc10447b8a8a33768ef7e10fa9b386c4\": container with ID starting with cd8f3aaef77ed4b850c80cc49480c0b2dc10447b8a8a33768ef7e10fa9b386c4 not found: ID does not exist" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.280884 4792 scope.go:117] "RemoveContainer" containerID="3c6a19b2e18dd1c62329861ffb1df28161f3ff4bf8b723838a677fd954974096" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.280951 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1018828e-7a21-4a36-83a9-c87d6aaa38c3-utilities" (OuterVolumeSpecName: "utilities") pod "1018828e-7a21-4a36-83a9-c87d6aaa38c3" (UID: "1018828e-7a21-4a36-83a9-c87d6aaa38c3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:04:46 crc kubenswrapper[4792]: E0121 18:04:46.281518 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c6a19b2e18dd1c62329861ffb1df28161f3ff4bf8b723838a677fd954974096\": container with ID starting with 3c6a19b2e18dd1c62329861ffb1df28161f3ff4bf8b723838a677fd954974096 not found: ID does not exist" containerID="3c6a19b2e18dd1c62329861ffb1df28161f3ff4bf8b723838a677fd954974096" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.281906 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c6a19b2e18dd1c62329861ffb1df28161f3ff4bf8b723838a677fd954974096"} err="failed to get container status \"3c6a19b2e18dd1c62329861ffb1df28161f3ff4bf8b723838a677fd954974096\": rpc error: code = NotFound desc = could not find container \"3c6a19b2e18dd1c62329861ffb1df28161f3ff4bf8b723838a677fd954974096\": container with ID starting with 3c6a19b2e18dd1c62329861ffb1df28161f3ff4bf8b723838a677fd954974096 not found: ID does not exist" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.281933 4792 scope.go:117] "RemoveContainer" containerID="056c40bb94649ec92bda3b3854a66c3f5412361dad988399fac837071ad8262f" Jan 21 18:04:46 crc kubenswrapper[4792]: E0121 18:04:46.282577 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"056c40bb94649ec92bda3b3854a66c3f5412361dad988399fac837071ad8262f\": container with ID starting with 056c40bb94649ec92bda3b3854a66c3f5412361dad988399fac837071ad8262f not found: ID does not exist" containerID="056c40bb94649ec92bda3b3854a66c3f5412361dad988399fac837071ad8262f" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.282603 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"056c40bb94649ec92bda3b3854a66c3f5412361dad988399fac837071ad8262f"} err="failed to get container status \"056c40bb94649ec92bda3b3854a66c3f5412361dad988399fac837071ad8262f\": rpc error: code = NotFound desc = could not find container \"056c40bb94649ec92bda3b3854a66c3f5412361dad988399fac837071ad8262f\": container with ID starting with 056c40bb94649ec92bda3b3854a66c3f5412361dad988399fac837071ad8262f not found: 
ID does not exist" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.285803 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1018828e-7a21-4a36-83a9-c87d6aaa38c3-kube-api-access-zkmrw" (OuterVolumeSpecName: "kube-api-access-zkmrw") pod "1018828e-7a21-4a36-83a9-c87d6aaa38c3" (UID: "1018828e-7a21-4a36-83a9-c87d6aaa38c3"). InnerVolumeSpecName "kube-api-access-zkmrw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.344525 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1018828e-7a21-4a36-83a9-c87d6aaa38c3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1018828e-7a21-4a36-83a9-c87d6aaa38c3" (UID: "1018828e-7a21-4a36-83a9-c87d6aaa38c3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.382082 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/106746c3-4570-4081-90ec-a6f1afd6dade-catalog-content\") pod \"106746c3-4570-4081-90ec-a6f1afd6dade\" (UID: \"106746c3-4570-4081-90ec-a6f1afd6dade\") " Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.382163 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9cvsq\" (UniqueName: \"kubernetes.io/projected/106746c3-4570-4081-90ec-a6f1afd6dade-kube-api-access-9cvsq\") pod \"106746c3-4570-4081-90ec-a6f1afd6dade\" (UID: \"106746c3-4570-4081-90ec-a6f1afd6dade\") " Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.382256 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/106746c3-4570-4081-90ec-a6f1afd6dade-utilities\") pod \"106746c3-4570-4081-90ec-a6f1afd6dade\" (UID: \"106746c3-4570-4081-90ec-a6f1afd6dade\") " Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.382563 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1018828e-7a21-4a36-83a9-c87d6aaa38c3-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.382577 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1018828e-7a21-4a36-83a9-c87d6aaa38c3-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.382589 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkmrw\" (UniqueName: \"kubernetes.io/projected/1018828e-7a21-4a36-83a9-c87d6aaa38c3-kube-api-access-zkmrw\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.383344 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/106746c3-4570-4081-90ec-a6f1afd6dade-utilities" (OuterVolumeSpecName: "utilities") pod "106746c3-4570-4081-90ec-a6f1afd6dade" (UID: "106746c3-4570-4081-90ec-a6f1afd6dade"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.386908 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/106746c3-4570-4081-90ec-a6f1afd6dade-kube-api-access-9cvsq" (OuterVolumeSpecName: "kube-api-access-9cvsq") pod "106746c3-4570-4081-90ec-a6f1afd6dade" (UID: "106746c3-4570-4081-90ec-a6f1afd6dade"). InnerVolumeSpecName "kube-api-access-9cvsq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.448023 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nmngq"] Jan 21 18:04:46 crc kubenswrapper[4792]: W0121 18:04:46.468610 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd21d6d31_01f6_40c1_817b_cc87735ae2f6.slice/crio-2fa46bcafb2f7ac0d266712dd2b8b1fa4ce8773c0b0cf5d7f8750f1f02298200 WatchSource:0}: Error finding container 2fa46bcafb2f7ac0d266712dd2b8b1fa4ce8773c0b0cf5d7f8750f1f02298200: Status 404 returned error can't find the container with id 2fa46bcafb2f7ac0d266712dd2b8b1fa4ce8773c0b0cf5d7f8750f1f02298200 Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.483891 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/106746c3-4570-4081-90ec-a6f1afd6dade-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.484670 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9cvsq\" (UniqueName: \"kubernetes.io/projected/106746c3-4570-4081-90ec-a6f1afd6dade-kube-api-access-9cvsq\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.496503 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bdt7b"] Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.501216 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bdt7b"] Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.516511 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/106746c3-4570-4081-90ec-a6f1afd6dade-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "106746c3-4570-4081-90ec-a6f1afd6dade" (UID: "106746c3-4570-4081-90ec-a6f1afd6dade"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.517285 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4kppq"] Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.523338 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4kppq"] Jan 21 18:04:46 crc kubenswrapper[4792]: I0121 18:04:46.586184 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/106746c3-4570-4081-90ec-a6f1afd6dade-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.057413 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l85mb" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.137687 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d4jqh" event={"ID":"106746c3-4570-4081-90ec-a6f1afd6dade","Type":"ContainerDied","Data":"912e239f3e17301de26252cf0de8f6b5607e8eebcd9fb4345c33fb32e725ca34"} Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.138218 4792 scope.go:117] "RemoveContainer" containerID="7be40c72c549b12d3b6ea6b84125bb04e6a21170fbc5bc2cd26f187c872e2e75" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.137718 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d4jqh" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.141000 4792 generic.go:334] "Generic (PLEG): container finished" podID="e6f739f0-719a-4454-bbaf-b4b5c624f084" containerID="422782ed0f6c84054d8de9616394a89530aa8eb556a2135c8e4e3ba7bdccd475" exitCode=0 Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.141072 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l85mb" event={"ID":"e6f739f0-719a-4454-bbaf-b4b5c624f084","Type":"ContainerDied","Data":"422782ed0f6c84054d8de9616394a89530aa8eb556a2135c8e4e3ba7bdccd475"} Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.141101 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l85mb" event={"ID":"e6f739f0-719a-4454-bbaf-b4b5c624f084","Type":"ContainerDied","Data":"707aaad46246aebf33837e4b3abb662c1da5afaf27122cfe4534cc788139cc7d"} Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.141185 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l85mb" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.147190 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-c575g" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.148505 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nmngq" event={"ID":"d21d6d31-01f6-40c1-817b-cc87735ae2f6","Type":"ContainerStarted","Data":"702796df04f54baead5d6b98a608bb1b9dfb948432076462749f32b1205893a6"} Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.148569 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nmngq" event={"ID":"d21d6d31-01f6-40c1-817b-cc87735ae2f6","Type":"ContainerStarted","Data":"2fa46bcafb2f7ac0d266712dd2b8b1fa4ce8773c0b0cf5d7f8750f1f02298200"} Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.148593 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-nmngq" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.160196 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-nmngq" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.175793 4792 scope.go:117] "RemoveContainer" containerID="ce1309e19d5401417e1707e0e55aab57aaf4b5c25cfb581828be7dc257263dba" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.189322 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-nmngq" podStartSLOduration=2.189284625 podStartE2EDuration="2.189284625s" podCreationTimestamp="2026-01-21 18:04:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:04:47.174488014 +0000 UTC m=+521.156451220" watchObservedRunningTime="2026-01-21 18:04:47.189284625 +0000 UTC m=+521.171247801" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.197494 4792 scope.go:117] "RemoveContainer" containerID="23d89e97943eb3d1d4c420f74abcfc9e5cd0fe912dcc5343dd416b054d8c074d" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.199191 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6f739f0-719a-4454-bbaf-b4b5c624f084-catalog-content\") pod \"e6f739f0-719a-4454-bbaf-b4b5c624f084\" (UID: \"e6f739f0-719a-4454-bbaf-b4b5c624f084\") " Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.199756 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-88l7z\" (UniqueName: \"kubernetes.io/projected/e6f739f0-719a-4454-bbaf-b4b5c624f084-kube-api-access-88l7z\") pod \"e6f739f0-719a-4454-bbaf-b4b5c624f084\" (UID: \"e6f739f0-719a-4454-bbaf-b4b5c624f084\") " Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.200613 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6f739f0-719a-4454-bbaf-b4b5c624f084-utilities\") pod \"e6f739f0-719a-4454-bbaf-b4b5c624f084\" (UID: \"e6f739f0-719a-4454-bbaf-b4b5c624f084\") " Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.201336 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6f739f0-719a-4454-bbaf-b4b5c624f084-utilities" (OuterVolumeSpecName: "utilities") pod "e6f739f0-719a-4454-bbaf-b4b5c624f084" (UID: "e6f739f0-719a-4454-bbaf-b4b5c624f084"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.203392 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-d4jqh"] Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.204104 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6f739f0-719a-4454-bbaf-b4b5c624f084-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.211189 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-d4jqh"] Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.213244 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6f739f0-719a-4454-bbaf-b4b5c624f084-kube-api-access-88l7z" (OuterVolumeSpecName: "kube-api-access-88l7z") pod "e6f739f0-719a-4454-bbaf-b4b5c624f084" (UID: "e6f739f0-719a-4454-bbaf-b4b5c624f084"). InnerVolumeSpecName "kube-api-access-88l7z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.232075 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c575g"] Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.236586 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-c575g"] Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.239015 4792 scope.go:117] "RemoveContainer" containerID="422782ed0f6c84054d8de9616394a89530aa8eb556a2135c8e4e3ba7bdccd475" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.241041 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6f739f0-719a-4454-bbaf-b4b5c624f084-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e6f739f0-719a-4454-bbaf-b4b5c624f084" (UID: "e6f739f0-719a-4454-bbaf-b4b5c624f084"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.260952 4792 scope.go:117] "RemoveContainer" containerID="19750ad0531ae71c16142ee722088155a15e7029bcc15eafa8ebf4db1d096d8c" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.291939 4792 scope.go:117] "RemoveContainer" containerID="1208399a0f6cf74b9caa828c3ccaf1356ea27b63e5f7a724b983e1b9be4d1e12" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.306107 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6f739f0-719a-4454-bbaf-b4b5c624f084-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.306144 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-88l7z\" (UniqueName: \"kubernetes.io/projected/e6f739f0-719a-4454-bbaf-b4b5c624f084-kube-api-access-88l7z\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.325241 4792 scope.go:117] "RemoveContainer" containerID="422782ed0f6c84054d8de9616394a89530aa8eb556a2135c8e4e3ba7bdccd475" Jan 21 18:04:47 crc kubenswrapper[4792]: E0121 18:04:47.325792 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"422782ed0f6c84054d8de9616394a89530aa8eb556a2135c8e4e3ba7bdccd475\": container with ID starting with 422782ed0f6c84054d8de9616394a89530aa8eb556a2135c8e4e3ba7bdccd475 not found: ID does not exist" containerID="422782ed0f6c84054d8de9616394a89530aa8eb556a2135c8e4e3ba7bdccd475" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.325821 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"422782ed0f6c84054d8de9616394a89530aa8eb556a2135c8e4e3ba7bdccd475"} err="failed to get container status \"422782ed0f6c84054d8de9616394a89530aa8eb556a2135c8e4e3ba7bdccd475\": rpc error: code = NotFound desc = could not find container \"422782ed0f6c84054d8de9616394a89530aa8eb556a2135c8e4e3ba7bdccd475\": container with ID starting with 422782ed0f6c84054d8de9616394a89530aa8eb556a2135c8e4e3ba7bdccd475 not found: ID does not exist" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.325841 4792 scope.go:117] "RemoveContainer" containerID="19750ad0531ae71c16142ee722088155a15e7029bcc15eafa8ebf4db1d096d8c" Jan 21 18:04:47 crc kubenswrapper[4792]: E0121 18:04:47.326459 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19750ad0531ae71c16142ee722088155a15e7029bcc15eafa8ebf4db1d096d8c\": container with ID starting with 19750ad0531ae71c16142ee722088155a15e7029bcc15eafa8ebf4db1d096d8c not found: ID does not exist" containerID="19750ad0531ae71c16142ee722088155a15e7029bcc15eafa8ebf4db1d096d8c" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.326552 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19750ad0531ae71c16142ee722088155a15e7029bcc15eafa8ebf4db1d096d8c"} err="failed to get container status \"19750ad0531ae71c16142ee722088155a15e7029bcc15eafa8ebf4db1d096d8c\": rpc error: code = NotFound desc = could not find container \"19750ad0531ae71c16142ee722088155a15e7029bcc15eafa8ebf4db1d096d8c\": container with ID starting with 19750ad0531ae71c16142ee722088155a15e7029bcc15eafa8ebf4db1d096d8c not found: ID does not exist" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.326585 4792 scope.go:117] "RemoveContainer" 
containerID="1208399a0f6cf74b9caa828c3ccaf1356ea27b63e5f7a724b983e1b9be4d1e12" Jan 21 18:04:47 crc kubenswrapper[4792]: E0121 18:04:47.327039 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1208399a0f6cf74b9caa828c3ccaf1356ea27b63e5f7a724b983e1b9be4d1e12\": container with ID starting with 1208399a0f6cf74b9caa828c3ccaf1356ea27b63e5f7a724b983e1b9be4d1e12 not found: ID does not exist" containerID="1208399a0f6cf74b9caa828c3ccaf1356ea27b63e5f7a724b983e1b9be4d1e12" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.327065 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1208399a0f6cf74b9caa828c3ccaf1356ea27b63e5f7a724b983e1b9be4d1e12"} err="failed to get container status \"1208399a0f6cf74b9caa828c3ccaf1356ea27b63e5f7a724b983e1b9be4d1e12\": rpc error: code = NotFound desc = could not find container \"1208399a0f6cf74b9caa828c3ccaf1356ea27b63e5f7a724b983e1b9be4d1e12\": container with ID starting with 1208399a0f6cf74b9caa828c3ccaf1356ea27b63e5f7a724b983e1b9be4d1e12 not found: ID does not exist" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.480643 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l85mb"] Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.491884 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-l85mb"] Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.604391 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9crgd"] Jan 21 18:04:47 crc kubenswrapper[4792]: E0121 18:04:47.607874 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a15d3491-6301-49f2-a196-df5db956aa82" containerName="marketplace-operator" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.607925 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a15d3491-6301-49f2-a196-df5db956aa82" containerName="marketplace-operator" Jan 21 18:04:47 crc kubenswrapper[4792]: E0121 18:04:47.607951 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" containerName="registry-server" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.607964 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" containerName="registry-server" Jan 21 18:04:47 crc kubenswrapper[4792]: E0121 18:04:47.607978 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" containerName="extract-content" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.608001 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" containerName="extract-content" Jan 21 18:04:47 crc kubenswrapper[4792]: E0121 18:04:47.608021 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" containerName="extract-utilities" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.608033 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" containerName="extract-utilities" Jan 21 18:04:47 crc kubenswrapper[4792]: E0121 18:04:47.608050 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" containerName="extract-content" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.608103 4792 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" containerName="extract-content" Jan 21 18:04:47 crc kubenswrapper[4792]: E0121 18:04:47.608123 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a15d3491-6301-49f2-a196-df5db956aa82" containerName="marketplace-operator" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.608136 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a15d3491-6301-49f2-a196-df5db956aa82" containerName="marketplace-operator" Jan 21 18:04:47 crc kubenswrapper[4792]: E0121 18:04:47.608151 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" containerName="extract-utilities" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.608165 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" containerName="extract-utilities" Jan 21 18:04:47 crc kubenswrapper[4792]: E0121 18:04:47.608181 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" containerName="extract-content" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.608193 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" containerName="extract-content" Jan 21 18:04:47 crc kubenswrapper[4792]: E0121 18:04:47.608208 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" containerName="registry-server" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.608221 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" containerName="registry-server" Jan 21 18:04:47 crc kubenswrapper[4792]: E0121 18:04:47.608235 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" containerName="registry-server" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.608247 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" containerName="registry-server" Jan 21 18:04:47 crc kubenswrapper[4792]: E0121 18:04:47.608268 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a15d3491-6301-49f2-a196-df5db956aa82" containerName="marketplace-operator" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.608280 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a15d3491-6301-49f2-a196-df5db956aa82" containerName="marketplace-operator" Jan 21 18:04:47 crc kubenswrapper[4792]: E0121 18:04:47.608298 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" containerName="extract-utilities" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.608310 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" containerName="extract-utilities" Jan 21 18:04:47 crc kubenswrapper[4792]: E0121 18:04:47.608328 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" containerName="extract-content" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.608341 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" containerName="extract-content" Jan 21 18:04:47 crc kubenswrapper[4792]: E0121 18:04:47.608357 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" containerName="registry-server" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 
18:04:47.608368 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" containerName="registry-server" Jan 21 18:04:47 crc kubenswrapper[4792]: E0121 18:04:47.608391 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" containerName="extract-utilities" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.608403 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" containerName="extract-utilities" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.608595 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a15d3491-6301-49f2-a196-df5db956aa82" containerName="marketplace-operator" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.608615 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" containerName="registry-server" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.608635 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a15d3491-6301-49f2-a196-df5db956aa82" containerName="marketplace-operator" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.608654 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a15d3491-6301-49f2-a196-df5db956aa82" containerName="marketplace-operator" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.608667 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" containerName="registry-server" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.608688 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" containerName="registry-server" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.608703 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" containerName="registry-server" Jan 21 18:04:47 crc kubenswrapper[4792]: E0121 18:04:47.608911 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a15d3491-6301-49f2-a196-df5db956aa82" containerName="marketplace-operator" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.608926 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a15d3491-6301-49f2-a196-df5db956aa82" containerName="marketplace-operator" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.609079 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a15d3491-6301-49f2-a196-df5db956aa82" containerName="marketplace-operator" Jan 21 18:04:47 crc kubenswrapper[4792]: E0121 18:04:47.609237 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a15d3491-6301-49f2-a196-df5db956aa82" containerName="marketplace-operator" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.609247 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a15d3491-6301-49f2-a196-df5db956aa82" containerName="marketplace-operator" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.609386 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a15d3491-6301-49f2-a196-df5db956aa82" containerName="marketplace-operator" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.610197 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9crgd" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.610918 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9crgd"] Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.613450 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.712763 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8948a28-6967-4016-b98c-643621434b69-catalog-content\") pod \"certified-operators-9crgd\" (UID: \"f8948a28-6967-4016-b98c-643621434b69\") " pod="openshift-marketplace/certified-operators-9crgd" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.712961 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8948a28-6967-4016-b98c-643621434b69-utilities\") pod \"certified-operators-9crgd\" (UID: \"f8948a28-6967-4016-b98c-643621434b69\") " pod="openshift-marketplace/certified-operators-9crgd" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.713010 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gkkv\" (UniqueName: \"kubernetes.io/projected/f8948a28-6967-4016-b98c-643621434b69-kube-api-access-9gkkv\") pod \"certified-operators-9crgd\" (UID: \"f8948a28-6967-4016-b98c-643621434b69\") " pod="openshift-marketplace/certified-operators-9crgd" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.802760 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gpzvv"] Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.804140 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gpzvv" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.807665 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.815407 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8948a28-6967-4016-b98c-643621434b69-utilities\") pod \"certified-operators-9crgd\" (UID: \"f8948a28-6967-4016-b98c-643621434b69\") " pod="openshift-marketplace/certified-operators-9crgd" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.815517 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gkkv\" (UniqueName: \"kubernetes.io/projected/f8948a28-6967-4016-b98c-643621434b69-kube-api-access-9gkkv\") pod \"certified-operators-9crgd\" (UID: \"f8948a28-6967-4016-b98c-643621434b69\") " pod="openshift-marketplace/certified-operators-9crgd" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.815577 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8948a28-6967-4016-b98c-643621434b69-catalog-content\") pod \"certified-operators-9crgd\" (UID: \"f8948a28-6967-4016-b98c-643621434b69\") " pod="openshift-marketplace/certified-operators-9crgd" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.816360 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8948a28-6967-4016-b98c-643621434b69-catalog-content\") pod \"certified-operators-9crgd\" (UID: \"f8948a28-6967-4016-b98c-643621434b69\") " pod="openshift-marketplace/certified-operators-9crgd" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.816804 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8948a28-6967-4016-b98c-643621434b69-utilities\") pod \"certified-operators-9crgd\" (UID: \"f8948a28-6967-4016-b98c-643621434b69\") " pod="openshift-marketplace/certified-operators-9crgd" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.820004 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gpzvv"] Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.854679 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gkkv\" (UniqueName: \"kubernetes.io/projected/f8948a28-6967-4016-b98c-643621434b69-kube-api-access-9gkkv\") pod \"certified-operators-9crgd\" (UID: \"f8948a28-6967-4016-b98c-643621434b69\") " pod="openshift-marketplace/certified-operators-9crgd" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.917720 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c48ac10-ac54-4582-9fd8-e739820affc6-utilities\") pod \"community-operators-gpzvv\" (UID: \"0c48ac10-ac54-4582-9fd8-e739820affc6\") " pod="openshift-marketplace/community-operators-gpzvv" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.918019 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c48ac10-ac54-4582-9fd8-e739820affc6-catalog-content\") pod \"community-operators-gpzvv\" (UID: 
\"0c48ac10-ac54-4582-9fd8-e739820affc6\") " pod="openshift-marketplace/community-operators-gpzvv" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.918335 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kl4nj\" (UniqueName: \"kubernetes.io/projected/0c48ac10-ac54-4582-9fd8-e739820affc6-kube-api-access-kl4nj\") pod \"community-operators-gpzvv\" (UID: \"0c48ac10-ac54-4582-9fd8-e739820affc6\") " pod="openshift-marketplace/community-operators-gpzvv" Jan 21 18:04:47 crc kubenswrapper[4792]: I0121 18:04:47.931216 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9crgd" Jan 21 18:04:48 crc kubenswrapper[4792]: I0121 18:04:48.020865 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kl4nj\" (UniqueName: \"kubernetes.io/projected/0c48ac10-ac54-4582-9fd8-e739820affc6-kube-api-access-kl4nj\") pod \"community-operators-gpzvv\" (UID: \"0c48ac10-ac54-4582-9fd8-e739820affc6\") " pod="openshift-marketplace/community-operators-gpzvv" Jan 21 18:04:48 crc kubenswrapper[4792]: I0121 18:04:48.020952 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c48ac10-ac54-4582-9fd8-e739820affc6-utilities\") pod \"community-operators-gpzvv\" (UID: \"0c48ac10-ac54-4582-9fd8-e739820affc6\") " pod="openshift-marketplace/community-operators-gpzvv" Jan 21 18:04:48 crc kubenswrapper[4792]: I0121 18:04:48.021014 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c48ac10-ac54-4582-9fd8-e739820affc6-catalog-content\") pod \"community-operators-gpzvv\" (UID: \"0c48ac10-ac54-4582-9fd8-e739820affc6\") " pod="openshift-marketplace/community-operators-gpzvv" Jan 21 18:04:48 crc kubenswrapper[4792]: I0121 18:04:48.021682 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c48ac10-ac54-4582-9fd8-e739820affc6-catalog-content\") pod \"community-operators-gpzvv\" (UID: \"0c48ac10-ac54-4582-9fd8-e739820affc6\") " pod="openshift-marketplace/community-operators-gpzvv" Jan 21 18:04:48 crc kubenswrapper[4792]: I0121 18:04:48.021803 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c48ac10-ac54-4582-9fd8-e739820affc6-utilities\") pod \"community-operators-gpzvv\" (UID: \"0c48ac10-ac54-4582-9fd8-e739820affc6\") " pod="openshift-marketplace/community-operators-gpzvv" Jan 21 18:04:48 crc kubenswrapper[4792]: I0121 18:04:48.051061 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kl4nj\" (UniqueName: \"kubernetes.io/projected/0c48ac10-ac54-4582-9fd8-e739820affc6-kube-api-access-kl4nj\") pod \"community-operators-gpzvv\" (UID: \"0c48ac10-ac54-4582-9fd8-e739820affc6\") " pod="openshift-marketplace/community-operators-gpzvv" Jan 21 18:04:48 crc kubenswrapper[4792]: I0121 18:04:48.148006 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gpzvv" Jan 21 18:04:48 crc kubenswrapper[4792]: I0121 18:04:48.170525 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9crgd"] Jan 21 18:04:48 crc kubenswrapper[4792]: W0121 18:04:48.187417 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf8948a28_6967_4016_b98c_643621434b69.slice/crio-9be69fcba5a58cd54d23ceb83907f1a93232ed6905be08ac8b1a1c143b54edb9 WatchSource:0}: Error finding container 9be69fcba5a58cd54d23ceb83907f1a93232ed6905be08ac8b1a1c143b54edb9: Status 404 returned error can't find the container with id 9be69fcba5a58cd54d23ceb83907f1a93232ed6905be08ac8b1a1c143b54edb9 Jan 21 18:04:48 crc kubenswrapper[4792]: I0121 18:04:48.263729 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1018828e-7a21-4a36-83a9-c87d6aaa38c3" path="/var/lib/kubelet/pods/1018828e-7a21-4a36-83a9-c87d6aaa38c3/volumes" Jan 21 18:04:48 crc kubenswrapper[4792]: I0121 18:04:48.265439 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="106746c3-4570-4081-90ec-a6f1afd6dade" path="/var/lib/kubelet/pods/106746c3-4570-4081-90ec-a6f1afd6dade/volumes" Jan 21 18:04:48 crc kubenswrapper[4792]: I0121 18:04:48.266314 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a15d3491-6301-49f2-a196-df5db956aa82" path="/var/lib/kubelet/pods/a15d3491-6301-49f2-a196-df5db956aa82/volumes" Jan 21 18:04:48 crc kubenswrapper[4792]: I0121 18:04:48.269112 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd" path="/var/lib/kubelet/pods/bf2f54b1-35a4-4c74-a656-68dfb3e5e2bd/volumes" Jan 21 18:04:48 crc kubenswrapper[4792]: I0121 18:04:48.269804 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6f739f0-719a-4454-bbaf-b4b5c624f084" path="/var/lib/kubelet/pods/e6f739f0-719a-4454-bbaf-b4b5c624f084/volumes" Jan 21 18:04:48 crc kubenswrapper[4792]: I0121 18:04:48.428180 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gpzvv"] Jan 21 18:04:48 crc kubenswrapper[4792]: W0121 18:04:48.480938 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c48ac10_ac54_4582_9fd8_e739820affc6.slice/crio-d17cf5aaae057d55146e52ccf7219e347827db2d8f979d39e115c5cf5d5e32dd WatchSource:0}: Error finding container d17cf5aaae057d55146e52ccf7219e347827db2d8f979d39e115c5cf5d5e32dd: Status 404 returned error can't find the container with id d17cf5aaae057d55146e52ccf7219e347827db2d8f979d39e115c5cf5d5e32dd Jan 21 18:04:49 crc kubenswrapper[4792]: I0121 18:04:49.164107 4792 generic.go:334] "Generic (PLEG): container finished" podID="f8948a28-6967-4016-b98c-643621434b69" containerID="c720b10d4c87713c45b218e99b3cfc85a4704d16c838418015135dd78ed32ade" exitCode=0 Jan 21 18:04:49 crc kubenswrapper[4792]: I0121 18:04:49.164189 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9crgd" event={"ID":"f8948a28-6967-4016-b98c-643621434b69","Type":"ContainerDied","Data":"c720b10d4c87713c45b218e99b3cfc85a4704d16c838418015135dd78ed32ade"} Jan 21 18:04:49 crc kubenswrapper[4792]: I0121 18:04:49.164224 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9crgd" 
event={"ID":"f8948a28-6967-4016-b98c-643621434b69","Type":"ContainerStarted","Data":"9be69fcba5a58cd54d23ceb83907f1a93232ed6905be08ac8b1a1c143b54edb9"} Jan 21 18:04:49 crc kubenswrapper[4792]: I0121 18:04:49.166385 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 18:04:49 crc kubenswrapper[4792]: I0121 18:04:49.168740 4792 generic.go:334] "Generic (PLEG): container finished" podID="0c48ac10-ac54-4582-9fd8-e739820affc6" containerID="b54b88b0feb2b62b4641813ad5e54a49b4cd031abcae113ac6cc10d2c3f16d69" exitCode=0 Jan 21 18:04:49 crc kubenswrapper[4792]: I0121 18:04:49.168839 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gpzvv" event={"ID":"0c48ac10-ac54-4582-9fd8-e739820affc6","Type":"ContainerDied","Data":"b54b88b0feb2b62b4641813ad5e54a49b4cd031abcae113ac6cc10d2c3f16d69"} Jan 21 18:04:49 crc kubenswrapper[4792]: I0121 18:04:49.168905 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gpzvv" event={"ID":"0c48ac10-ac54-4582-9fd8-e739820affc6","Type":"ContainerStarted","Data":"d17cf5aaae057d55146e52ccf7219e347827db2d8f979d39e115c5cf5d5e32dd"} Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.020646 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rcgcm"] Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.024539 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rcgcm" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.031968 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rcgcm"] Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.032474 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.150588 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmkt8\" (UniqueName: \"kubernetes.io/projected/d5388300-3f00-46d9-9def-a945f6b3e4f9-kube-api-access-jmkt8\") pod \"redhat-marketplace-rcgcm\" (UID: \"d5388300-3f00-46d9-9def-a945f6b3e4f9\") " pod="openshift-marketplace/redhat-marketplace-rcgcm" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.150659 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5388300-3f00-46d9-9def-a945f6b3e4f9-catalog-content\") pod \"redhat-marketplace-rcgcm\" (UID: \"d5388300-3f00-46d9-9def-a945f6b3e4f9\") " pod="openshift-marketplace/redhat-marketplace-rcgcm" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.150904 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5388300-3f00-46d9-9def-a945f6b3e4f9-utilities\") pod \"redhat-marketplace-rcgcm\" (UID: \"d5388300-3f00-46d9-9def-a945f6b3e4f9\") " pod="openshift-marketplace/redhat-marketplace-rcgcm" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.202501 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-z4zsz"] Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.207304 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z4zsz" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.209675 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.210600 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z4zsz"] Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.252379 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5388300-3f00-46d9-9def-a945f6b3e4f9-utilities\") pod \"redhat-marketplace-rcgcm\" (UID: \"d5388300-3f00-46d9-9def-a945f6b3e4f9\") " pod="openshift-marketplace/redhat-marketplace-rcgcm" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.252449 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmkt8\" (UniqueName: \"kubernetes.io/projected/d5388300-3f00-46d9-9def-a945f6b3e4f9-kube-api-access-jmkt8\") pod \"redhat-marketplace-rcgcm\" (UID: \"d5388300-3f00-46d9-9def-a945f6b3e4f9\") " pod="openshift-marketplace/redhat-marketplace-rcgcm" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.252488 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5388300-3f00-46d9-9def-a945f6b3e4f9-catalog-content\") pod \"redhat-marketplace-rcgcm\" (UID: \"d5388300-3f00-46d9-9def-a945f6b3e4f9\") " pod="openshift-marketplace/redhat-marketplace-rcgcm" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.253004 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5388300-3f00-46d9-9def-a945f6b3e4f9-catalog-content\") pod \"redhat-marketplace-rcgcm\" (UID: \"d5388300-3f00-46d9-9def-a945f6b3e4f9\") " pod="openshift-marketplace/redhat-marketplace-rcgcm" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.253230 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5388300-3f00-46d9-9def-a945f6b3e4f9-utilities\") pod \"redhat-marketplace-rcgcm\" (UID: \"d5388300-3f00-46d9-9def-a945f6b3e4f9\") " pod="openshift-marketplace/redhat-marketplace-rcgcm" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.286474 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmkt8\" (UniqueName: \"kubernetes.io/projected/d5388300-3f00-46d9-9def-a945f6b3e4f9-kube-api-access-jmkt8\") pod \"redhat-marketplace-rcgcm\" (UID: \"d5388300-3f00-46d9-9def-a945f6b3e4f9\") " pod="openshift-marketplace/redhat-marketplace-rcgcm" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.344359 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rcgcm" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.353391 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79cpc\" (UniqueName: \"kubernetes.io/projected/1374e517-0337-44da-b942-0c7163208087-kube-api-access-79cpc\") pod \"redhat-operators-z4zsz\" (UID: \"1374e517-0337-44da-b942-0c7163208087\") " pod="openshift-marketplace/redhat-operators-z4zsz" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.353510 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1374e517-0337-44da-b942-0c7163208087-utilities\") pod \"redhat-operators-z4zsz\" (UID: \"1374e517-0337-44da-b942-0c7163208087\") " pod="openshift-marketplace/redhat-operators-z4zsz" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.353590 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1374e517-0337-44da-b942-0c7163208087-catalog-content\") pod \"redhat-operators-z4zsz\" (UID: \"1374e517-0337-44da-b942-0c7163208087\") " pod="openshift-marketplace/redhat-operators-z4zsz" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.455316 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1374e517-0337-44da-b942-0c7163208087-catalog-content\") pod \"redhat-operators-z4zsz\" (UID: \"1374e517-0337-44da-b942-0c7163208087\") " pod="openshift-marketplace/redhat-operators-z4zsz" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.455874 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79cpc\" (UniqueName: \"kubernetes.io/projected/1374e517-0337-44da-b942-0c7163208087-kube-api-access-79cpc\") pod \"redhat-operators-z4zsz\" (UID: \"1374e517-0337-44da-b942-0c7163208087\") " pod="openshift-marketplace/redhat-operators-z4zsz" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.455897 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1374e517-0337-44da-b942-0c7163208087-catalog-content\") pod \"redhat-operators-z4zsz\" (UID: \"1374e517-0337-44da-b942-0c7163208087\") " pod="openshift-marketplace/redhat-operators-z4zsz" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.455920 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1374e517-0337-44da-b942-0c7163208087-utilities\") pod \"redhat-operators-z4zsz\" (UID: \"1374e517-0337-44da-b942-0c7163208087\") " pod="openshift-marketplace/redhat-operators-z4zsz" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.456363 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1374e517-0337-44da-b942-0c7163208087-utilities\") pod \"redhat-operators-z4zsz\" (UID: \"1374e517-0337-44da-b942-0c7163208087\") " pod="openshift-marketplace/redhat-operators-z4zsz" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.488425 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79cpc\" (UniqueName: \"kubernetes.io/projected/1374e517-0337-44da-b942-0c7163208087-kube-api-access-79cpc\") pod \"redhat-operators-z4zsz\" (UID: 
\"1374e517-0337-44da-b942-0c7163208087\") " pod="openshift-marketplace/redhat-operators-z4zsz" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.545246 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rcgcm"] Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.563194 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z4zsz" Jan 21 18:04:50 crc kubenswrapper[4792]: I0121 18:04:50.800505 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z4zsz"] Jan 21 18:04:51 crc kubenswrapper[4792]: I0121 18:04:51.183544 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rcgcm" event={"ID":"d5388300-3f00-46d9-9def-a945f6b3e4f9","Type":"ContainerStarted","Data":"fa71c738f7906e9a6190ee01d69e4e31bdee5974cf3cf0d15d1c0896b2bdd165"} Jan 21 18:04:51 crc kubenswrapper[4792]: W0121 18:04:51.493916 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1374e517_0337_44da_b942_0c7163208087.slice/crio-97154c7bd77c270a1dd5cf9f317b971df2bc4e412aab6afd53f2a569de2ba6f6 WatchSource:0}: Error finding container 97154c7bd77c270a1dd5cf9f317b971df2bc4e412aab6afd53f2a569de2ba6f6: Status 404 returned error can't find the container with id 97154c7bd77c270a1dd5cf9f317b971df2bc4e412aab6afd53f2a569de2ba6f6 Jan 21 18:04:52 crc kubenswrapper[4792]: I0121 18:04:52.190109 4792 generic.go:334] "Generic (PLEG): container finished" podID="d5388300-3f00-46d9-9def-a945f6b3e4f9" containerID="f82316a0357f770b6f13c37d3b064fbc08a22680fd8b6c00393af8c286527ab3" exitCode=0 Jan 21 18:04:52 crc kubenswrapper[4792]: I0121 18:04:52.190452 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rcgcm" event={"ID":"d5388300-3f00-46d9-9def-a945f6b3e4f9","Type":"ContainerDied","Data":"f82316a0357f770b6f13c37d3b064fbc08a22680fd8b6c00393af8c286527ab3"} Jan 21 18:04:52 crc kubenswrapper[4792]: I0121 18:04:52.192651 4792 generic.go:334] "Generic (PLEG): container finished" podID="1374e517-0337-44da-b942-0c7163208087" containerID="cb90ecfbd621e21a961ac9349544ab68e16a6c8618e07345458c2535d0d59a84" exitCode=0 Jan 21 18:04:52 crc kubenswrapper[4792]: I0121 18:04:52.192716 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z4zsz" event={"ID":"1374e517-0337-44da-b942-0c7163208087","Type":"ContainerDied","Data":"cb90ecfbd621e21a961ac9349544ab68e16a6c8618e07345458c2535d0d59a84"} Jan 21 18:04:52 crc kubenswrapper[4792]: I0121 18:04:52.192761 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z4zsz" event={"ID":"1374e517-0337-44da-b942-0c7163208087","Type":"ContainerStarted","Data":"97154c7bd77c270a1dd5cf9f317b971df2bc4e412aab6afd53f2a569de2ba6f6"} Jan 21 18:04:52 crc kubenswrapper[4792]: I0121 18:04:52.196008 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gpzvv" event={"ID":"0c48ac10-ac54-4582-9fd8-e739820affc6","Type":"ContainerStarted","Data":"650e527a62b8bdb32e176e003fd1c8aec2a2b04b83332f036b7e439b1735bed0"} Jan 21 18:04:52 crc kubenswrapper[4792]: I0121 18:04:52.197754 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9crgd" 
event={"ID":"f8948a28-6967-4016-b98c-643621434b69","Type":"ContainerStarted","Data":"0f872b3f86899d603aff9862aa38241167f04e9818a87f895558ca5c8f01ec1b"} Jan 21 18:04:53 crc kubenswrapper[4792]: I0121 18:04:53.205613 4792 generic.go:334] "Generic (PLEG): container finished" podID="0c48ac10-ac54-4582-9fd8-e739820affc6" containerID="650e527a62b8bdb32e176e003fd1c8aec2a2b04b83332f036b7e439b1735bed0" exitCode=0 Jan 21 18:04:53 crc kubenswrapper[4792]: I0121 18:04:53.205730 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gpzvv" event={"ID":"0c48ac10-ac54-4582-9fd8-e739820affc6","Type":"ContainerDied","Data":"650e527a62b8bdb32e176e003fd1c8aec2a2b04b83332f036b7e439b1735bed0"} Jan 21 18:04:53 crc kubenswrapper[4792]: I0121 18:04:53.208221 4792 generic.go:334] "Generic (PLEG): container finished" podID="f8948a28-6967-4016-b98c-643621434b69" containerID="0f872b3f86899d603aff9862aa38241167f04e9818a87f895558ca5c8f01ec1b" exitCode=0 Jan 21 18:04:53 crc kubenswrapper[4792]: I0121 18:04:53.208284 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9crgd" event={"ID":"f8948a28-6967-4016-b98c-643621434b69","Type":"ContainerDied","Data":"0f872b3f86899d603aff9862aa38241167f04e9818a87f895558ca5c8f01ec1b"} Jan 21 18:04:53 crc kubenswrapper[4792]: I0121 18:04:53.213250 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rcgcm" event={"ID":"d5388300-3f00-46d9-9def-a945f6b3e4f9","Type":"ContainerStarted","Data":"f0b3972d49a92ab46b5bbab5f00a5fafa60d2a6903ed0fbc6a4220722fcefbb2"} Jan 21 18:04:54 crc kubenswrapper[4792]: I0121 18:04:54.220784 4792 generic.go:334] "Generic (PLEG): container finished" podID="d5388300-3f00-46d9-9def-a945f6b3e4f9" containerID="f0b3972d49a92ab46b5bbab5f00a5fafa60d2a6903ed0fbc6a4220722fcefbb2" exitCode=0 Jan 21 18:04:54 crc kubenswrapper[4792]: I0121 18:04:54.220981 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rcgcm" event={"ID":"d5388300-3f00-46d9-9def-a945f6b3e4f9","Type":"ContainerDied","Data":"f0b3972d49a92ab46b5bbab5f00a5fafa60d2a6903ed0fbc6a4220722fcefbb2"} Jan 21 18:04:54 crc kubenswrapper[4792]: I0121 18:04:54.229196 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z4zsz" event={"ID":"1374e517-0337-44da-b942-0c7163208087","Type":"ContainerStarted","Data":"6a650743fdb3e63b414ec8361559fba972d07dd6bb70f424640e507b1d1adb91"} Jan 21 18:04:54 crc kubenswrapper[4792]: I0121 18:04:54.232712 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gpzvv" event={"ID":"0c48ac10-ac54-4582-9fd8-e739820affc6","Type":"ContainerStarted","Data":"ddbfacebfe377b479506ee5441eecc68be7912c021796b5794560a67de804a7b"} Jan 21 18:04:54 crc kubenswrapper[4792]: I0121 18:04:54.235288 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9crgd" event={"ID":"f8948a28-6967-4016-b98c-643621434b69","Type":"ContainerStarted","Data":"3662f31ad5b7e08c8a520f730a925f67eec209455f73d7458909a295b37bddbd"} Jan 21 18:04:54 crc kubenswrapper[4792]: I0121 18:04:54.268601 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gpzvv" podStartSLOduration=2.510327305 podStartE2EDuration="7.268569757s" podCreationTimestamp="2026-01-21 18:04:47 +0000 UTC" firstStartedPulling="2026-01-21 18:04:49.171094556 +0000 UTC 
m=+523.153057742" lastFinishedPulling="2026-01-21 18:04:53.929337008 +0000 UTC m=+527.911300194" observedRunningTime="2026-01-21 18:04:54.268167785 +0000 UTC m=+528.250130971" watchObservedRunningTime="2026-01-21 18:04:54.268569757 +0000 UTC m=+528.250532943" Jan 21 18:04:54 crc kubenswrapper[4792]: I0121 18:04:54.293029 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9crgd" podStartSLOduration=2.828759589 podStartE2EDuration="7.293013449s" podCreationTimestamp="2026-01-21 18:04:47 +0000 UTC" firstStartedPulling="2026-01-21 18:04:49.166128579 +0000 UTC m=+523.148091765" lastFinishedPulling="2026-01-21 18:04:53.630382439 +0000 UTC m=+527.612345625" observedRunningTime="2026-01-21 18:04:54.291007964 +0000 UTC m=+528.272971150" watchObservedRunningTime="2026-01-21 18:04:54.293013449 +0000 UTC m=+528.274976635" Jan 21 18:04:54 crc kubenswrapper[4792]: I0121 18:04:54.994193 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6"] Jan 21 18:04:54 crc kubenswrapper[4792]: I0121 18:04:54.994637 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" podUID="3687213f-bd4b-430f-8bd7-d345f4a467e0" containerName="controller-manager" containerID="cri-o://967877bc333e1a226f9ebc2b2b1d099cb006692a35698224150b806eb59b184b" gracePeriod=30 Jan 21 18:04:55 crc kubenswrapper[4792]: I0121 18:04:55.247388 4792 generic.go:334] "Generic (PLEG): container finished" podID="3687213f-bd4b-430f-8bd7-d345f4a467e0" containerID="967877bc333e1a226f9ebc2b2b1d099cb006692a35698224150b806eb59b184b" exitCode=0 Jan 21 18:04:55 crc kubenswrapper[4792]: I0121 18:04:55.247556 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" event={"ID":"3687213f-bd4b-430f-8bd7-d345f4a467e0","Type":"ContainerDied","Data":"967877bc333e1a226f9ebc2b2b1d099cb006692a35698224150b806eb59b184b"} Jan 21 18:04:55 crc kubenswrapper[4792]: I0121 18:04:55.251667 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rcgcm" event={"ID":"d5388300-3f00-46d9-9def-a945f6b3e4f9","Type":"ContainerStarted","Data":"aba366971dc0c297f356819f93ceb9f041a36de7023c15920bd824bbd955c584"} Jan 21 18:04:55 crc kubenswrapper[4792]: I0121 18:04:55.260289 4792 generic.go:334] "Generic (PLEG): container finished" podID="1374e517-0337-44da-b942-0c7163208087" containerID="6a650743fdb3e63b414ec8361559fba972d07dd6bb70f424640e507b1d1adb91" exitCode=0 Jan 21 18:04:55 crc kubenswrapper[4792]: I0121 18:04:55.260546 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z4zsz" event={"ID":"1374e517-0337-44da-b942-0c7163208087","Type":"ContainerDied","Data":"6a650743fdb3e63b414ec8361559fba972d07dd6bb70f424640e507b1d1adb91"} Jan 21 18:04:55 crc kubenswrapper[4792]: I0121 18:04:55.285706 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rcgcm" podStartSLOduration=3.719395592 podStartE2EDuration="6.285686895s" podCreationTimestamp="2026-01-21 18:04:49 +0000 UTC" firstStartedPulling="2026-01-21 18:04:52.19187067 +0000 UTC m=+526.173833856" lastFinishedPulling="2026-01-21 18:04:54.758161973 +0000 UTC m=+528.740125159" observedRunningTime="2026-01-21 18:04:55.284273536 +0000 UTC m=+529.266236732" watchObservedRunningTime="2026-01-21 
18:04:55.285686895 +0000 UTC m=+529.267650081" Jan 21 18:04:55 crc kubenswrapper[4792]: I0121 18:04:55.433480 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" Jan 21 18:04:55 crc kubenswrapper[4792]: I0121 18:04:55.539574 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3687213f-bd4b-430f-8bd7-d345f4a467e0-proxy-ca-bundles\") pod \"3687213f-bd4b-430f-8bd7-d345f4a467e0\" (UID: \"3687213f-bd4b-430f-8bd7-d345f4a467e0\") " Jan 21 18:04:55 crc kubenswrapper[4792]: I0121 18:04:55.539949 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9wwf\" (UniqueName: \"kubernetes.io/projected/3687213f-bd4b-430f-8bd7-d345f4a467e0-kube-api-access-d9wwf\") pod \"3687213f-bd4b-430f-8bd7-d345f4a467e0\" (UID: \"3687213f-bd4b-430f-8bd7-d345f4a467e0\") " Jan 21 18:04:55 crc kubenswrapper[4792]: I0121 18:04:55.539985 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3687213f-bd4b-430f-8bd7-d345f4a467e0-client-ca\") pod \"3687213f-bd4b-430f-8bd7-d345f4a467e0\" (UID: \"3687213f-bd4b-430f-8bd7-d345f4a467e0\") " Jan 21 18:04:55 crc kubenswrapper[4792]: I0121 18:04:55.540004 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3687213f-bd4b-430f-8bd7-d345f4a467e0-serving-cert\") pod \"3687213f-bd4b-430f-8bd7-d345f4a467e0\" (UID: \"3687213f-bd4b-430f-8bd7-d345f4a467e0\") " Jan 21 18:04:55 crc kubenswrapper[4792]: I0121 18:04:55.540068 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3687213f-bd4b-430f-8bd7-d345f4a467e0-config\") pod \"3687213f-bd4b-430f-8bd7-d345f4a467e0\" (UID: \"3687213f-bd4b-430f-8bd7-d345f4a467e0\") " Jan 21 18:04:55 crc kubenswrapper[4792]: I0121 18:04:55.540640 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3687213f-bd4b-430f-8bd7-d345f4a467e0-client-ca" (OuterVolumeSpecName: "client-ca") pod "3687213f-bd4b-430f-8bd7-d345f4a467e0" (UID: "3687213f-bd4b-430f-8bd7-d345f4a467e0"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:04:55 crc kubenswrapper[4792]: I0121 18:04:55.540952 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3687213f-bd4b-430f-8bd7-d345f4a467e0-config" (OuterVolumeSpecName: "config") pod "3687213f-bd4b-430f-8bd7-d345f4a467e0" (UID: "3687213f-bd4b-430f-8bd7-d345f4a467e0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:04:55 crc kubenswrapper[4792]: I0121 18:04:55.541133 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3687213f-bd4b-430f-8bd7-d345f4a467e0-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "3687213f-bd4b-430f-8bd7-d345f4a467e0" (UID: "3687213f-bd4b-430f-8bd7-d345f4a467e0"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:04:55 crc kubenswrapper[4792]: I0121 18:04:55.546214 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3687213f-bd4b-430f-8bd7-d345f4a467e0-kube-api-access-d9wwf" (OuterVolumeSpecName: "kube-api-access-d9wwf") pod "3687213f-bd4b-430f-8bd7-d345f4a467e0" (UID: "3687213f-bd4b-430f-8bd7-d345f4a467e0"). InnerVolumeSpecName "kube-api-access-d9wwf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:04:55 crc kubenswrapper[4792]: I0121 18:04:55.548960 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3687213f-bd4b-430f-8bd7-d345f4a467e0-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "3687213f-bd4b-430f-8bd7-d345f4a467e0" (UID: "3687213f-bd4b-430f-8bd7-d345f4a467e0"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:04:55 crc kubenswrapper[4792]: I0121 18:04:55.641216 4792 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3687213f-bd4b-430f-8bd7-d345f4a467e0-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:55 crc kubenswrapper[4792]: I0121 18:04:55.641265 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9wwf\" (UniqueName: \"kubernetes.io/projected/3687213f-bd4b-430f-8bd7-d345f4a467e0-kube-api-access-d9wwf\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:55 crc kubenswrapper[4792]: I0121 18:04:55.641279 4792 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3687213f-bd4b-430f-8bd7-d345f4a467e0-client-ca\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:55 crc kubenswrapper[4792]: I0121 18:04:55.641288 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3687213f-bd4b-430f-8bd7-d345f4a467e0-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:55 crc kubenswrapper[4792]: I0121 18:04:55.641296 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3687213f-bd4b-430f-8bd7-d345f4a467e0-config\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.112965 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7876c8774d-6dgbs"] Jan 21 18:04:56 crc kubenswrapper[4792]: E0121 18:04:56.113480 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3687213f-bd4b-430f-8bd7-d345f4a467e0" containerName="controller-manager" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.113493 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3687213f-bd4b-430f-8bd7-d345f4a467e0" containerName="controller-manager" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.113590 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3687213f-bd4b-430f-8bd7-d345f4a467e0" containerName="controller-manager" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.114035 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.127020 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7876c8774d-6dgbs"] Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.255834 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c277231c-43c8-422a-8bf9-a8af620300d2-config\") pod \"controller-manager-7876c8774d-6dgbs\" (UID: \"c277231c-43c8-422a-8bf9-a8af620300d2\") " pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.256010 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvv2n\" (UniqueName: \"kubernetes.io/projected/c277231c-43c8-422a-8bf9-a8af620300d2-kube-api-access-jvv2n\") pod \"controller-manager-7876c8774d-6dgbs\" (UID: \"c277231c-43c8-422a-8bf9-a8af620300d2\") " pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.256064 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c277231c-43c8-422a-8bf9-a8af620300d2-client-ca\") pod \"controller-manager-7876c8774d-6dgbs\" (UID: \"c277231c-43c8-422a-8bf9-a8af620300d2\") " pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.256108 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c277231c-43c8-422a-8bf9-a8af620300d2-proxy-ca-bundles\") pod \"controller-manager-7876c8774d-6dgbs\" (UID: \"c277231c-43c8-422a-8bf9-a8af620300d2\") " pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.256166 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c277231c-43c8-422a-8bf9-a8af620300d2-serving-cert\") pod \"controller-manager-7876c8774d-6dgbs\" (UID: \"c277231c-43c8-422a-8bf9-a8af620300d2\") " pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.269096 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.269126 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6" event={"ID":"3687213f-bd4b-430f-8bd7-d345f4a467e0","Type":"ContainerDied","Data":"916628966ac39f526bf3ea3c2c3737245b970f726a285f6ed36857d0f7535045"} Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.269297 4792 scope.go:117] "RemoveContainer" containerID="967877bc333e1a226f9ebc2b2b1d099cb006692a35698224150b806eb59b184b" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.272558 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z4zsz" event={"ID":"1374e517-0337-44da-b942-0c7163208087","Type":"ContainerStarted","Data":"ae49cceab34fd66c158cb247994aa0336426d4c8a6fc5b1301d4958b6696f072"} Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.293498 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6"] Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.312964 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-8cd75dbb6-7n9b6"] Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.318297 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-z4zsz" podStartSLOduration=2.818006186 podStartE2EDuration="6.318230838s" podCreationTimestamp="2026-01-21 18:04:50 +0000 UTC" firstStartedPulling="2026-01-21 18:04:52.193798584 +0000 UTC m=+526.175761770" lastFinishedPulling="2026-01-21 18:04:55.694023236 +0000 UTC m=+529.675986422" observedRunningTime="2026-01-21 18:04:56.311149863 +0000 UTC m=+530.293113049" watchObservedRunningTime="2026-01-21 18:04:56.318230838 +0000 UTC m=+530.300194024" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.357288 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c277231c-43c8-422a-8bf9-a8af620300d2-config\") pod \"controller-manager-7876c8774d-6dgbs\" (UID: \"c277231c-43c8-422a-8bf9-a8af620300d2\") " pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.357385 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvv2n\" (UniqueName: \"kubernetes.io/projected/c277231c-43c8-422a-8bf9-a8af620300d2-kube-api-access-jvv2n\") pod \"controller-manager-7876c8774d-6dgbs\" (UID: \"c277231c-43c8-422a-8bf9-a8af620300d2\") " pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.357467 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c277231c-43c8-422a-8bf9-a8af620300d2-client-ca\") pod \"controller-manager-7876c8774d-6dgbs\" (UID: \"c277231c-43c8-422a-8bf9-a8af620300d2\") " pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.357492 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c277231c-43c8-422a-8bf9-a8af620300d2-proxy-ca-bundles\") pod \"controller-manager-7876c8774d-6dgbs\" (UID: \"c277231c-43c8-422a-8bf9-a8af620300d2\") " 
pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.357527 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c277231c-43c8-422a-8bf9-a8af620300d2-serving-cert\") pod \"controller-manager-7876c8774d-6dgbs\" (UID: \"c277231c-43c8-422a-8bf9-a8af620300d2\") " pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.358637 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c277231c-43c8-422a-8bf9-a8af620300d2-client-ca\") pod \"controller-manager-7876c8774d-6dgbs\" (UID: \"c277231c-43c8-422a-8bf9-a8af620300d2\") " pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.359232 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c277231c-43c8-422a-8bf9-a8af620300d2-proxy-ca-bundles\") pod \"controller-manager-7876c8774d-6dgbs\" (UID: \"c277231c-43c8-422a-8bf9-a8af620300d2\") " pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.359955 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c277231c-43c8-422a-8bf9-a8af620300d2-config\") pod \"controller-manager-7876c8774d-6dgbs\" (UID: \"c277231c-43c8-422a-8bf9-a8af620300d2\") " pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.363254 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c277231c-43c8-422a-8bf9-a8af620300d2-serving-cert\") pod \"controller-manager-7876c8774d-6dgbs\" (UID: \"c277231c-43c8-422a-8bf9-a8af620300d2\") " pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.376442 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvv2n\" (UniqueName: \"kubernetes.io/projected/c277231c-43c8-422a-8bf9-a8af620300d2-kube-api-access-jvv2n\") pod \"controller-manager-7876c8774d-6dgbs\" (UID: \"c277231c-43c8-422a-8bf9-a8af620300d2\") " pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.454234 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" Jan 21 18:04:56 crc kubenswrapper[4792]: I0121 18:04:56.714956 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7876c8774d-6dgbs"] Jan 21 18:04:56 crc kubenswrapper[4792]: W0121 18:04:56.738987 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc277231c_43c8_422a_8bf9_a8af620300d2.slice/crio-7773be049d0219a36589a8a7d4f6e95f94787e0976c69f002e0c6faebb933391 WatchSource:0}: Error finding container 7773be049d0219a36589a8a7d4f6e95f94787e0976c69f002e0c6faebb933391: Status 404 returned error can't find the container with id 7773be049d0219a36589a8a7d4f6e95f94787e0976c69f002e0c6faebb933391 Jan 21 18:04:57 crc kubenswrapper[4792]: I0121 18:04:57.280480 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" event={"ID":"c277231c-43c8-422a-8bf9-a8af620300d2","Type":"ContainerStarted","Data":"7773be049d0219a36589a8a7d4f6e95f94787e0976c69f002e0c6faebb933391"} Jan 21 18:04:57 crc kubenswrapper[4792]: I0121 18:04:57.932046 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9crgd" Jan 21 18:04:57 crc kubenswrapper[4792]: I0121 18:04:57.932754 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9crgd" Jan 21 18:04:58 crc kubenswrapper[4792]: I0121 18:04:58.010022 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9crgd" Jan 21 18:04:58 crc kubenswrapper[4792]: I0121 18:04:58.149086 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gpzvv" Jan 21 18:04:58 crc kubenswrapper[4792]: I0121 18:04:58.149143 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gpzvv" Jan 21 18:04:58 crc kubenswrapper[4792]: I0121 18:04:58.190718 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gpzvv" Jan 21 18:04:58 crc kubenswrapper[4792]: I0121 18:04:58.253296 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3687213f-bd4b-430f-8bd7-d345f4a467e0" path="/var/lib/kubelet/pods/3687213f-bd4b-430f-8bd7-d345f4a467e0/volumes" Jan 21 18:04:58 crc kubenswrapper[4792]: I0121 18:04:58.289380 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" Jan 21 18:04:58 crc kubenswrapper[4792]: I0121 18:04:58.289760 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" event={"ID":"c277231c-43c8-422a-8bf9-a8af620300d2","Type":"ContainerStarted","Data":"0cd48d9150b5b24bbe304a1ce613c82379fe7607c497ffa98974e56c326e467c"} Jan 21 18:04:58 crc kubenswrapper[4792]: I0121 18:04:58.300083 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" Jan 21 18:04:58 crc kubenswrapper[4792]: I0121 18:04:58.311782 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" podStartSLOduration=4.311767095 
podStartE2EDuration="4.311767095s" podCreationTimestamp="2026-01-21 18:04:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:04:58.309685768 +0000 UTC m=+532.291648974" watchObservedRunningTime="2026-01-21 18:04:58.311767095 +0000 UTC m=+532.293730281" Jan 21 18:04:58 crc kubenswrapper[4792]: I0121 18:04:58.351706 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9crgd" Jan 21 18:05:00 crc kubenswrapper[4792]: I0121 18:05:00.344755 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rcgcm" Jan 21 18:05:00 crc kubenswrapper[4792]: I0121 18:05:00.346682 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rcgcm" Jan 21 18:05:00 crc kubenswrapper[4792]: I0121 18:05:00.391436 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rcgcm" Jan 21 18:05:00 crc kubenswrapper[4792]: I0121 18:05:00.563596 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-z4zsz" Jan 21 18:05:00 crc kubenswrapper[4792]: I0121 18:05:00.563669 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-z4zsz" Jan 21 18:05:01 crc kubenswrapper[4792]: I0121 18:05:01.352576 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rcgcm" Jan 21 18:05:01 crc kubenswrapper[4792]: I0121 18:05:01.605378 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-z4zsz" podUID="1374e517-0337-44da-b942-0c7163208087" containerName="registry-server" probeResult="failure" output=< Jan 21 18:05:01 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s Jan 21 18:05:01 crc kubenswrapper[4792]: > Jan 21 18:05:02 crc kubenswrapper[4792]: I0121 18:05:02.293170 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-x48g6" Jan 21 18:05:02 crc kubenswrapper[4792]: I0121 18:05:02.354420 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xscbt"] Jan 21 18:05:07 crc kubenswrapper[4792]: I0121 18:05:07.187056 4792 scope.go:117] "RemoveContainer" containerID="eb6962ae70b83122c0f2994039eb80c7baffbf9f30d2ff441e100c7b1c2e947e" Jan 21 18:05:08 crc kubenswrapper[4792]: I0121 18:05:08.189392 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gpzvv" Jan 21 18:05:10 crc kubenswrapper[4792]: I0121 18:05:10.605369 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-z4zsz" Jan 21 18:05:10 crc kubenswrapper[4792]: I0121 18:05:10.654636 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-z4zsz" Jan 21 18:05:14 crc kubenswrapper[4792]: I0121 18:05:14.951081 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7876c8774d-6dgbs"] Jan 21 18:05:14 crc kubenswrapper[4792]: I0121 18:05:14.951722 4792 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" podUID="c277231c-43c8-422a-8bf9-a8af620300d2" containerName="controller-manager" containerID="cri-o://0cd48d9150b5b24bbe304a1ce613c82379fe7607c497ffa98974e56c326e467c" gracePeriod=30 Jan 21 18:05:14 crc kubenswrapper[4792]: I0121 18:05:14.971711 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds"] Jan 21 18:05:14 crc kubenswrapper[4792]: I0121 18:05:14.972076 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" podUID="1e553386-8570-488a-89ff-0afd3560c0f9" containerName="route-controller-manager" containerID="cri-o://2690804ddb6072d6c02889d33373a642955f299f525a07252d2ab40e9db00019" gracePeriod=30 Jan 21 18:05:15 crc kubenswrapper[4792]: I0121 18:05:15.410465 4792 generic.go:334] "Generic (PLEG): container finished" podID="c277231c-43c8-422a-8bf9-a8af620300d2" containerID="0cd48d9150b5b24bbe304a1ce613c82379fe7607c497ffa98974e56c326e467c" exitCode=0 Jan 21 18:05:15 crc kubenswrapper[4792]: I0121 18:05:15.410548 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" event={"ID":"c277231c-43c8-422a-8bf9-a8af620300d2","Type":"ContainerDied","Data":"0cd48d9150b5b24bbe304a1ce613c82379fe7607c497ffa98974e56c326e467c"} Jan 21 18:05:15 crc kubenswrapper[4792]: I0121 18:05:15.412621 4792 generic.go:334] "Generic (PLEG): container finished" podID="1e553386-8570-488a-89ff-0afd3560c0f9" containerID="2690804ddb6072d6c02889d33373a642955f299f525a07252d2ab40e9db00019" exitCode=0 Jan 21 18:05:15 crc kubenswrapper[4792]: I0121 18:05:15.412653 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" event={"ID":"1e553386-8570-488a-89ff-0afd3560c0f9","Type":"ContainerDied","Data":"2690804ddb6072d6c02889d33373a642955f299f525a07252d2ab40e9db00019"} Jan 21 18:05:15 crc kubenswrapper[4792]: I0121 18:05:15.927119 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" Jan 21 18:05:15 crc kubenswrapper[4792]: I0121 18:05:15.931758 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" Jan 21 18:05:15 crc kubenswrapper[4792]: I0121 18:05:15.995614 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c277231c-43c8-422a-8bf9-a8af620300d2-proxy-ca-bundles\") pod \"c277231c-43c8-422a-8bf9-a8af620300d2\" (UID: \"c277231c-43c8-422a-8bf9-a8af620300d2\") " Jan 21 18:05:15 crc kubenswrapper[4792]: I0121 18:05:15.995688 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c277231c-43c8-422a-8bf9-a8af620300d2-serving-cert\") pod \"c277231c-43c8-422a-8bf9-a8af620300d2\" (UID: \"c277231c-43c8-422a-8bf9-a8af620300d2\") " Jan 21 18:05:15 crc kubenswrapper[4792]: I0121 18:05:15.995719 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvv2n\" (UniqueName: \"kubernetes.io/projected/c277231c-43c8-422a-8bf9-a8af620300d2-kube-api-access-jvv2n\") pod \"c277231c-43c8-422a-8bf9-a8af620300d2\" (UID: \"c277231c-43c8-422a-8bf9-a8af620300d2\") " Jan 21 18:05:15 crc kubenswrapper[4792]: I0121 18:05:15.995750 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1e553386-8570-488a-89ff-0afd3560c0f9-client-ca\") pod \"1e553386-8570-488a-89ff-0afd3560c0f9\" (UID: \"1e553386-8570-488a-89ff-0afd3560c0f9\") " Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:15.995921 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e553386-8570-488a-89ff-0afd3560c0f9-serving-cert\") pod \"1e553386-8570-488a-89ff-0afd3560c0f9\" (UID: \"1e553386-8570-488a-89ff-0afd3560c0f9\") " Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:15.995959 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c277231c-43c8-422a-8bf9-a8af620300d2-config\") pod \"c277231c-43c8-422a-8bf9-a8af620300d2\" (UID: \"c277231c-43c8-422a-8bf9-a8af620300d2\") " Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:15.996159 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c277231c-43c8-422a-8bf9-a8af620300d2-client-ca\") pod \"c277231c-43c8-422a-8bf9-a8af620300d2\" (UID: \"c277231c-43c8-422a-8bf9-a8af620300d2\") " Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:15.996186 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e553386-8570-488a-89ff-0afd3560c0f9-config\") pod \"1e553386-8570-488a-89ff-0afd3560c0f9\" (UID: \"1e553386-8570-488a-89ff-0afd3560c0f9\") " Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:15.996218 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8k687\" (UniqueName: \"kubernetes.io/projected/1e553386-8570-488a-89ff-0afd3560c0f9-kube-api-access-8k687\") pod \"1e553386-8570-488a-89ff-0afd3560c0f9\" (UID: \"1e553386-8570-488a-89ff-0afd3560c0f9\") " Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:15.997146 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e553386-8570-488a-89ff-0afd3560c0f9-client-ca" (OuterVolumeSpecName: "client-ca") pod "1e553386-8570-488a-89ff-0afd3560c0f9" 
(UID: "1e553386-8570-488a-89ff-0afd3560c0f9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:15.997217 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e553386-8570-488a-89ff-0afd3560c0f9-config" (OuterVolumeSpecName: "config") pod "1e553386-8570-488a-89ff-0afd3560c0f9" (UID: "1e553386-8570-488a-89ff-0afd3560c0f9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:15.997262 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c277231c-43c8-422a-8bf9-a8af620300d2-config" (OuterVolumeSpecName: "config") pod "c277231c-43c8-422a-8bf9-a8af620300d2" (UID: "c277231c-43c8-422a-8bf9-a8af620300d2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:15.997927 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c277231c-43c8-422a-8bf9-a8af620300d2-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "c277231c-43c8-422a-8bf9-a8af620300d2" (UID: "c277231c-43c8-422a-8bf9-a8af620300d2"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:15.998709 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c277231c-43c8-422a-8bf9-a8af620300d2-client-ca" (OuterVolumeSpecName: "client-ca") pod "c277231c-43c8-422a-8bf9-a8af620300d2" (UID: "c277231c-43c8-422a-8bf9-a8af620300d2"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.003114 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e553386-8570-488a-89ff-0afd3560c0f9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1e553386-8570-488a-89ff-0afd3560c0f9" (UID: "1e553386-8570-488a-89ff-0afd3560c0f9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.003479 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c277231c-43c8-422a-8bf9-a8af620300d2-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c277231c-43c8-422a-8bf9-a8af620300d2" (UID: "c277231c-43c8-422a-8bf9-a8af620300d2"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.004211 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c277231c-43c8-422a-8bf9-a8af620300d2-kube-api-access-jvv2n" (OuterVolumeSpecName: "kube-api-access-jvv2n") pod "c277231c-43c8-422a-8bf9-a8af620300d2" (UID: "c277231c-43c8-422a-8bf9-a8af620300d2"). InnerVolumeSpecName "kube-api-access-jvv2n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.004629 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e553386-8570-488a-89ff-0afd3560c0f9-kube-api-access-8k687" (OuterVolumeSpecName: "kube-api-access-8k687") pod "1e553386-8570-488a-89ff-0afd3560c0f9" (UID: "1e553386-8570-488a-89ff-0afd3560c0f9"). 
InnerVolumeSpecName "kube-api-access-8k687". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.097230 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e553386-8570-488a-89ff-0afd3560c0f9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.097295 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c277231c-43c8-422a-8bf9-a8af620300d2-config\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.097305 4792 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c277231c-43c8-422a-8bf9-a8af620300d2-client-ca\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.097314 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e553386-8570-488a-89ff-0afd3560c0f9-config\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.097325 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8k687\" (UniqueName: \"kubernetes.io/projected/1e553386-8570-488a-89ff-0afd3560c0f9-kube-api-access-8k687\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.097342 4792 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c277231c-43c8-422a-8bf9-a8af620300d2-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.097353 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c277231c-43c8-422a-8bf9-a8af620300d2-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.097364 4792 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1e553386-8570-488a-89ff-0afd3560c0f9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.097377 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvv2n\" (UniqueName: \"kubernetes.io/projected/c277231c-43c8-422a-8bf9-a8af620300d2-kube-api-access-jvv2n\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.129448 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-849c54f845-5sz8f"] Jan 21 18:05:16 crc kubenswrapper[4792]: E0121 18:05:16.129835 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c277231c-43c8-422a-8bf9-a8af620300d2" containerName="controller-manager" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.129992 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c277231c-43c8-422a-8bf9-a8af620300d2" containerName="controller-manager" Jan 21 18:05:16 crc kubenswrapper[4792]: E0121 18:05:16.130007 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e553386-8570-488a-89ff-0afd3560c0f9" containerName="route-controller-manager" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.130014 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e553386-8570-488a-89ff-0afd3560c0f9" containerName="route-controller-manager" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.130148 4792 
memory_manager.go:354] "RemoveStaleState removing state" podUID="c277231c-43c8-422a-8bf9-a8af620300d2" containerName="controller-manager" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.130165 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e553386-8570-488a-89ff-0afd3560c0f9" containerName="route-controller-manager" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.130752 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.136049 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6"] Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.137205 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.147709 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6"] Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.154823 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-849c54f845-5sz8f"] Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.198507 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5brg\" (UniqueName: \"kubernetes.io/projected/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-kube-api-access-r5brg\") pod \"route-controller-manager-7887cfcf8b-wjsq6\" (UID: \"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad\") " pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.198605 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-client-ca\") pod \"route-controller-manager-7887cfcf8b-wjsq6\" (UID: \"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad\") " pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.198644 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-serving-cert\") pod \"route-controller-manager-7887cfcf8b-wjsq6\" (UID: \"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad\") " pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.198678 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-config\") pod \"route-controller-manager-7887cfcf8b-wjsq6\" (UID: \"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad\") " pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.300033 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-serving-cert\") pod \"route-controller-manager-7887cfcf8b-wjsq6\" (UID: \"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad\") " 
pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.300128 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-proxy-ca-bundles\") pod \"controller-manager-849c54f845-5sz8f\" (UID: \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\") " pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.300162 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-config\") pod \"route-controller-manager-7887cfcf8b-wjsq6\" (UID: \"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad\") " pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.300205 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-config\") pod \"controller-manager-849c54f845-5sz8f\" (UID: \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\") " pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.300319 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-client-ca\") pod \"controller-manager-849c54f845-5sz8f\" (UID: \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\") " pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.300411 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5brg\" (UniqueName: \"kubernetes.io/projected/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-kube-api-access-r5brg\") pod \"route-controller-manager-7887cfcf8b-wjsq6\" (UID: \"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad\") " pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.300466 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-serving-cert\") pod \"controller-manager-849c54f845-5sz8f\" (UID: \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\") " pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.300498 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-client-ca\") pod \"route-controller-manager-7887cfcf8b-wjsq6\" (UID: \"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad\") " pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.300523 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7769\" (UniqueName: \"kubernetes.io/projected/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-kube-api-access-t7769\") pod \"controller-manager-849c54f845-5sz8f\" (UID: \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\") " 
pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.302550 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-client-ca\") pod \"route-controller-manager-7887cfcf8b-wjsq6\" (UID: \"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad\") " pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.302633 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-config\") pod \"route-controller-manager-7887cfcf8b-wjsq6\" (UID: \"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad\") " pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.308613 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-serving-cert\") pod \"route-controller-manager-7887cfcf8b-wjsq6\" (UID: \"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad\") " pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.320755 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5brg\" (UniqueName: \"kubernetes.io/projected/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-kube-api-access-r5brg\") pod \"route-controller-manager-7887cfcf8b-wjsq6\" (UID: \"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad\") " pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.402088 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-serving-cert\") pod \"controller-manager-849c54f845-5sz8f\" (UID: \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\") " pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.402144 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7769\" (UniqueName: \"kubernetes.io/projected/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-kube-api-access-t7769\") pod \"controller-manager-849c54f845-5sz8f\" (UID: \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\") " pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.402167 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-proxy-ca-bundles\") pod \"controller-manager-849c54f845-5sz8f\" (UID: \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\") " pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.402228 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-config\") pod \"controller-manager-849c54f845-5sz8f\" (UID: \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\") " pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.402255 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-client-ca\") pod \"controller-manager-849c54f845-5sz8f\" (UID: \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\") " pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.403735 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-client-ca\") pod \"controller-manager-849c54f845-5sz8f\" (UID: \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\") " pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.403902 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-proxy-ca-bundles\") pod \"controller-manager-849c54f845-5sz8f\" (UID: \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\") " pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.404820 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-config\") pod \"controller-manager-849c54f845-5sz8f\" (UID: \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\") " pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.406175 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-serving-cert\") pod \"controller-manager-849c54f845-5sz8f\" (UID: \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\") " pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.419787 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" event={"ID":"c277231c-43c8-422a-8bf9-a8af620300d2","Type":"ContainerDied","Data":"7773be049d0219a36589a8a7d4f6e95f94787e0976c69f002e0c6faebb933391"} Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.419836 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7876c8774d-6dgbs" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.419885 4792 scope.go:117] "RemoveContainer" containerID="0cd48d9150b5b24bbe304a1ce613c82379fe7607c497ffa98974e56c326e467c" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.422200 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" event={"ID":"1e553386-8570-488a-89ff-0afd3560c0f9","Type":"ContainerDied","Data":"a72dec80a13be8d93e34c591867e154a1c7005bf4535199eefc6b3892846f5a1"} Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.422244 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.422994 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7769\" (UniqueName: \"kubernetes.io/projected/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-kube-api-access-t7769\") pod \"controller-manager-849c54f845-5sz8f\" (UID: \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\") " pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.452782 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.460060 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.463468 4792 scope.go:117] "RemoveContainer" containerID="2690804ddb6072d6c02889d33373a642955f299f525a07252d2ab40e9db00019" Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.474174 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7876c8774d-6dgbs"] Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.477254 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7876c8774d-6dgbs"] Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.494824 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds"] Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.502195 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6696d76777-8bfds"] Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.693164 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-849c54f845-5sz8f"] Jan 21 18:05:16 crc kubenswrapper[4792]: W0121 18:05:16.700092 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5cc7b05b_6f8e_4d39_a855_a21df963d2ba.slice/crio-3957b5ef2b0f9bdea6588a2506c0c7c48c04fccc8f1516e1e2ca5c5518e66534 WatchSource:0}: Error finding container 3957b5ef2b0f9bdea6588a2506c0c7c48c04fccc8f1516e1e2ca5c5518e66534: Status 404 returned error can't find the container with id 3957b5ef2b0f9bdea6588a2506c0c7c48c04fccc8f1516e1e2ca5c5518e66534 Jan 21 18:05:16 crc kubenswrapper[4792]: I0121 18:05:16.734321 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6"] Jan 21 18:05:16 crc kubenswrapper[4792]: W0121 18:05:16.745511 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1c8cf15_d53a_41c4_9bc0_6e97d3084cad.slice/crio-beb65314754679e4d2cc80600890669c0969cbaf0a8b8e0714f550540530f324 WatchSource:0}: Error finding container beb65314754679e4d2cc80600890669c0969cbaf0a8b8e0714f550540530f324: Status 404 returned error can't find the container with id beb65314754679e4d2cc80600890669c0969cbaf0a8b8e0714f550540530f324 Jan 21 18:05:17 crc kubenswrapper[4792]: I0121 18:05:17.435326 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" event={"ID":"5cc7b05b-6f8e-4d39-a855-a21df963d2ba","Type":"ContainerStarted","Data":"019c0bb2be69eff0a48e626ede175e459288120dafad58352c91dc2f10e575e7"} Jan 21 18:05:17 crc kubenswrapper[4792]: I0121 18:05:17.435756 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" event={"ID":"5cc7b05b-6f8e-4d39-a855-a21df963d2ba","Type":"ContainerStarted","Data":"3957b5ef2b0f9bdea6588a2506c0c7c48c04fccc8f1516e1e2ca5c5518e66534"} Jan 21 18:05:17 crc kubenswrapper[4792]: I0121 18:05:17.436431 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" Jan 21 18:05:17 crc kubenswrapper[4792]: I0121 18:05:17.438803 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" event={"ID":"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad","Type":"ContainerStarted","Data":"f1137361384e9d40435e92535d0e4bc6acb2ef993241edff4b6c359b02ec40ec"} Jan 21 18:05:17 crc kubenswrapper[4792]: I0121 18:05:17.438832 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" event={"ID":"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad","Type":"ContainerStarted","Data":"beb65314754679e4d2cc80600890669c0969cbaf0a8b8e0714f550540530f324"} Jan 21 18:05:17 crc kubenswrapper[4792]: I0121 18:05:17.439421 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" Jan 21 18:05:17 crc kubenswrapper[4792]: I0121 18:05:17.442596 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" Jan 21 18:05:17 crc kubenswrapper[4792]: I0121 18:05:17.453691 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" podStartSLOduration=3.453670511 podStartE2EDuration="3.453670511s" podCreationTimestamp="2026-01-21 18:05:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:05:17.451731707 +0000 UTC m=+551.433694893" watchObservedRunningTime="2026-01-21 18:05:17.453670511 +0000 UTC m=+551.435633697" Jan 21 18:05:17 crc kubenswrapper[4792]: I0121 18:05:17.471866 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" podStartSLOduration=2.471827831 podStartE2EDuration="2.471827831s" podCreationTimestamp="2026-01-21 18:05:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:05:17.470187315 +0000 UTC m=+551.452150521" watchObservedRunningTime="2026-01-21 18:05:17.471827831 +0000 UTC m=+551.453791017" Jan 21 18:05:17 crc kubenswrapper[4792]: I0121 18:05:17.632204 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" Jan 21 18:05:18 crc kubenswrapper[4792]: I0121 18:05:18.255137 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e553386-8570-488a-89ff-0afd3560c0f9" 
path="/var/lib/kubelet/pods/1e553386-8570-488a-89ff-0afd3560c0f9/volumes" Jan 21 18:05:18 crc kubenswrapper[4792]: I0121 18:05:18.255815 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c277231c-43c8-422a-8bf9-a8af620300d2" path="/var/lib/kubelet/pods/c277231c-43c8-422a-8bf9-a8af620300d2/volumes" Jan 21 18:05:27 crc kubenswrapper[4792]: I0121 18:05:27.408064 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" podUID="15a12100-5704-4b1b-b9db-4961709b2587" containerName="registry" containerID="cri-o://05e800bc652b8534f791ae0e90dd5dcd84cf236d54bf0ddca74f277b45b9bb32" gracePeriod=30 Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.262957 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.378639 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/15a12100-5704-4b1b-b9db-4961709b2587-registry-tls\") pod \"15a12100-5704-4b1b-b9db-4961709b2587\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.378793 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/15a12100-5704-4b1b-b9db-4961709b2587-ca-trust-extracted\") pod \"15a12100-5704-4b1b-b9db-4961709b2587\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.378888 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/15a12100-5704-4b1b-b9db-4961709b2587-registry-certificates\") pod \"15a12100-5704-4b1b-b9db-4961709b2587\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.379024 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/15a12100-5704-4b1b-b9db-4961709b2587-bound-sa-token\") pod \"15a12100-5704-4b1b-b9db-4961709b2587\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.379070 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/15a12100-5704-4b1b-b9db-4961709b2587-installation-pull-secrets\") pod \"15a12100-5704-4b1b-b9db-4961709b2587\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.379284 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"15a12100-5704-4b1b-b9db-4961709b2587\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.379364 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjrq5\" (UniqueName: \"kubernetes.io/projected/15a12100-5704-4b1b-b9db-4961709b2587-kube-api-access-xjrq5\") pod \"15a12100-5704-4b1b-b9db-4961709b2587\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.379410 4792 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/15a12100-5704-4b1b-b9db-4961709b2587-trusted-ca\") pod \"15a12100-5704-4b1b-b9db-4961709b2587\" (UID: \"15a12100-5704-4b1b-b9db-4961709b2587\") " Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.380925 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15a12100-5704-4b1b-b9db-4961709b2587-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "15a12100-5704-4b1b-b9db-4961709b2587" (UID: "15a12100-5704-4b1b-b9db-4961709b2587"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.381240 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15a12100-5704-4b1b-b9db-4961709b2587-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "15a12100-5704-4b1b-b9db-4961709b2587" (UID: "15a12100-5704-4b1b-b9db-4961709b2587"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.387597 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15a12100-5704-4b1b-b9db-4961709b2587-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "15a12100-5704-4b1b-b9db-4961709b2587" (UID: "15a12100-5704-4b1b-b9db-4961709b2587"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.387707 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15a12100-5704-4b1b-b9db-4961709b2587-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "15a12100-5704-4b1b-b9db-4961709b2587" (UID: "15a12100-5704-4b1b-b9db-4961709b2587"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.388075 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15a12100-5704-4b1b-b9db-4961709b2587-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "15a12100-5704-4b1b-b9db-4961709b2587" (UID: "15a12100-5704-4b1b-b9db-4961709b2587"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.388401 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15a12100-5704-4b1b-b9db-4961709b2587-kube-api-access-xjrq5" (OuterVolumeSpecName: "kube-api-access-xjrq5") pod "15a12100-5704-4b1b-b9db-4961709b2587" (UID: "15a12100-5704-4b1b-b9db-4961709b2587"). InnerVolumeSpecName "kube-api-access-xjrq5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.389543 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "15a12100-5704-4b1b-b9db-4961709b2587" (UID: "15a12100-5704-4b1b-b9db-4961709b2587"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.398758 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15a12100-5704-4b1b-b9db-4961709b2587-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "15a12100-5704-4b1b-b9db-4961709b2587" (UID: "15a12100-5704-4b1b-b9db-4961709b2587"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.480941 4792 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/15a12100-5704-4b1b-b9db-4961709b2587-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.480980 4792 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/15a12100-5704-4b1b-b9db-4961709b2587-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.480993 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjrq5\" (UniqueName: \"kubernetes.io/projected/15a12100-5704-4b1b-b9db-4961709b2587-kube-api-access-xjrq5\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.481005 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/15a12100-5704-4b1b-b9db-4961709b2587-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.481014 4792 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/15a12100-5704-4b1b-b9db-4961709b2587-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.481023 4792 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/15a12100-5704-4b1b-b9db-4961709b2587-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.481032 4792 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/15a12100-5704-4b1b-b9db-4961709b2587-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.512714 4792 generic.go:334] "Generic (PLEG): container finished" podID="15a12100-5704-4b1b-b9db-4961709b2587" containerID="05e800bc652b8534f791ae0e90dd5dcd84cf236d54bf0ddca74f277b45b9bb32" exitCode=0 Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.512775 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" event={"ID":"15a12100-5704-4b1b-b9db-4961709b2587","Type":"ContainerDied","Data":"05e800bc652b8534f791ae0e90dd5dcd84cf236d54bf0ddca74f277b45b9bb32"} Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.512788 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.512815 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xscbt" event={"ID":"15a12100-5704-4b1b-b9db-4961709b2587","Type":"ContainerDied","Data":"7f14eaf3284f8cec53124445f5d2ffd3833e608b9f8600c2b6ead09e9d5a5e10"} Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.512838 4792 scope.go:117] "RemoveContainer" containerID="05e800bc652b8534f791ae0e90dd5dcd84cf236d54bf0ddca74f277b45b9bb32" Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.537707 4792 scope.go:117] "RemoveContainer" containerID="05e800bc652b8534f791ae0e90dd5dcd84cf236d54bf0ddca74f277b45b9bb32" Jan 21 18:05:28 crc kubenswrapper[4792]: E0121 18:05:28.538220 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05e800bc652b8534f791ae0e90dd5dcd84cf236d54bf0ddca74f277b45b9bb32\": container with ID starting with 05e800bc652b8534f791ae0e90dd5dcd84cf236d54bf0ddca74f277b45b9bb32 not found: ID does not exist" containerID="05e800bc652b8534f791ae0e90dd5dcd84cf236d54bf0ddca74f277b45b9bb32" Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.538288 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05e800bc652b8534f791ae0e90dd5dcd84cf236d54bf0ddca74f277b45b9bb32"} err="failed to get container status \"05e800bc652b8534f791ae0e90dd5dcd84cf236d54bf0ddca74f277b45b9bb32\": rpc error: code = NotFound desc = could not find container \"05e800bc652b8534f791ae0e90dd5dcd84cf236d54bf0ddca74f277b45b9bb32\": container with ID starting with 05e800bc652b8534f791ae0e90dd5dcd84cf236d54bf0ddca74f277b45b9bb32 not found: ID does not exist" Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.549607 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xscbt"] Jan 21 18:05:28 crc kubenswrapper[4792]: I0121 18:05:28.554018 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xscbt"] Jan 21 18:05:30 crc kubenswrapper[4792]: I0121 18:05:30.264008 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15a12100-5704-4b1b-b9db-4961709b2587" path="/var/lib/kubelet/pods/15a12100-5704-4b1b-b9db-4961709b2587/volumes" Jan 21 18:05:34 crc kubenswrapper[4792]: I0121 18:05:34.937667 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-849c54f845-5sz8f"] Jan 21 18:05:34 crc kubenswrapper[4792]: I0121 18:05:34.938257 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" podUID="5cc7b05b-6f8e-4d39-a855-a21df963d2ba" containerName="controller-manager" containerID="cri-o://019c0bb2be69eff0a48e626ede175e459288120dafad58352c91dc2f10e575e7" gracePeriod=30 Jan 21 18:05:34 crc kubenswrapper[4792]: I0121 18:05:34.979699 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6"] Jan 21 18:05:34 crc kubenswrapper[4792]: I0121 18:05:34.979953 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" podUID="d1c8cf15-d53a-41c4-9bc0-6e97d3084cad" containerName="route-controller-manager" 
containerID="cri-o://f1137361384e9d40435e92535d0e4bc6acb2ef993241edff4b6c359b02ec40ec" gracePeriod=30 Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.296331 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.326495 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.388900 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-proxy-ca-bundles\") pod \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\" (UID: \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\") " Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.388959 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-serving-cert\") pod \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\" (UID: \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\") " Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.389025 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-config\") pod \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\" (UID: \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\") " Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.389049 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-config\") pod \"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad\" (UID: \"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad\") " Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.389971 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "5cc7b05b-6f8e-4d39-a855-a21df963d2ba" (UID: "5cc7b05b-6f8e-4d39-a855-a21df963d2ba"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.390004 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-config" (OuterVolumeSpecName: "config") pod "5cc7b05b-6f8e-4d39-a855-a21df963d2ba" (UID: "5cc7b05b-6f8e-4d39-a855-a21df963d2ba"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.390564 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-client-ca" (OuterVolumeSpecName: "client-ca") pod "5cc7b05b-6f8e-4d39-a855-a21df963d2ba" (UID: "5cc7b05b-6f8e-4d39-a855-a21df963d2ba"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.390700 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-config" (OuterVolumeSpecName: "config") pod "d1c8cf15-d53a-41c4-9bc0-6e97d3084cad" (UID: "d1c8cf15-d53a-41c4-9bc0-6e97d3084cad"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.390736 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-client-ca\") pod \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\" (UID: \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\") " Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.390786 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-serving-cert\") pod \"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad\" (UID: \"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad\") " Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.390810 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t7769\" (UniqueName: \"kubernetes.io/projected/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-kube-api-access-t7769\") pod \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\" (UID: \"5cc7b05b-6f8e-4d39-a855-a21df963d2ba\") " Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.391214 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-client-ca\") pod \"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad\" (UID: \"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad\") " Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.391245 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5brg\" (UniqueName: \"kubernetes.io/projected/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-kube-api-access-r5brg\") pod \"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad\" (UID: \"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad\") " Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.391746 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-client-ca" (OuterVolumeSpecName: "client-ca") pod "d1c8cf15-d53a-41c4-9bc0-6e97d3084cad" (UID: "d1c8cf15-d53a-41c4-9bc0-6e97d3084cad"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.391995 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-config\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.392016 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-config\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.392027 4792 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-client-ca\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.392037 4792 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-client-ca\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.392046 4792 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.395416 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d1c8cf15-d53a-41c4-9bc0-6e97d3084cad" (UID: "d1c8cf15-d53a-41c4-9bc0-6e97d3084cad"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.395565 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-kube-api-access-t7769" (OuterVolumeSpecName: "kube-api-access-t7769") pod "5cc7b05b-6f8e-4d39-a855-a21df963d2ba" (UID: "5cc7b05b-6f8e-4d39-a855-a21df963d2ba"). InnerVolumeSpecName "kube-api-access-t7769". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.395598 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-kube-api-access-r5brg" (OuterVolumeSpecName: "kube-api-access-r5brg") pod "d1c8cf15-d53a-41c4-9bc0-6e97d3084cad" (UID: "d1c8cf15-d53a-41c4-9bc0-6e97d3084cad"). InnerVolumeSpecName "kube-api-access-r5brg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.396001 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5cc7b05b-6f8e-4d39-a855-a21df963d2ba" (UID: "5cc7b05b-6f8e-4d39-a855-a21df963d2ba"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.493135 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5brg\" (UniqueName: \"kubernetes.io/projected/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-kube-api-access-r5brg\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.493195 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.493208 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.493218 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t7769\" (UniqueName: \"kubernetes.io/projected/5cc7b05b-6f8e-4d39-a855-a21df963d2ba-kube-api-access-t7769\") on node \"crc\" DevicePath \"\"" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.563095 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.563172 4792 generic.go:334] "Generic (PLEG): container finished" podID="5cc7b05b-6f8e-4d39-a855-a21df963d2ba" containerID="019c0bb2be69eff0a48e626ede175e459288120dafad58352c91dc2f10e575e7" exitCode=0 Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.563361 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" event={"ID":"5cc7b05b-6f8e-4d39-a855-a21df963d2ba","Type":"ContainerDied","Data":"019c0bb2be69eff0a48e626ede175e459288120dafad58352c91dc2f10e575e7"} Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.563406 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-849c54f845-5sz8f" event={"ID":"5cc7b05b-6f8e-4d39-a855-a21df963d2ba","Type":"ContainerDied","Data":"3957b5ef2b0f9bdea6588a2506c0c7c48c04fccc8f1516e1e2ca5c5518e66534"} Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.563430 4792 scope.go:117] "RemoveContainer" containerID="019c0bb2be69eff0a48e626ede175e459288120dafad58352c91dc2f10e575e7" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.565448 4792 generic.go:334] "Generic (PLEG): container finished" podID="d1c8cf15-d53a-41c4-9bc0-6e97d3084cad" containerID="f1137361384e9d40435e92535d0e4bc6acb2ef993241edff4b6c359b02ec40ec" exitCode=0 Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.565486 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" event={"ID":"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad","Type":"ContainerDied","Data":"f1137361384e9d40435e92535d0e4bc6acb2ef993241edff4b6c359b02ec40ec"} Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.565521 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" event={"ID":"d1c8cf15-d53a-41c4-9bc0-6e97d3084cad","Type":"ContainerDied","Data":"beb65314754679e4d2cc80600890669c0969cbaf0a8b8e0714f550540530f324"} Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.565536 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.584901 4792 scope.go:117] "RemoveContainer" containerID="019c0bb2be69eff0a48e626ede175e459288120dafad58352c91dc2f10e575e7" Jan 21 18:05:35 crc kubenswrapper[4792]: E0121 18:05:35.585580 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"019c0bb2be69eff0a48e626ede175e459288120dafad58352c91dc2f10e575e7\": container with ID starting with 019c0bb2be69eff0a48e626ede175e459288120dafad58352c91dc2f10e575e7 not found: ID does not exist" containerID="019c0bb2be69eff0a48e626ede175e459288120dafad58352c91dc2f10e575e7" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.585644 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"019c0bb2be69eff0a48e626ede175e459288120dafad58352c91dc2f10e575e7"} err="failed to get container status \"019c0bb2be69eff0a48e626ede175e459288120dafad58352c91dc2f10e575e7\": rpc error: code = NotFound desc = could not find container \"019c0bb2be69eff0a48e626ede175e459288120dafad58352c91dc2f10e575e7\": container with ID starting with 019c0bb2be69eff0a48e626ede175e459288120dafad58352c91dc2f10e575e7 not found: ID does not exist" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.585693 4792 scope.go:117] "RemoveContainer" containerID="f1137361384e9d40435e92535d0e4bc6acb2ef993241edff4b6c359b02ec40ec" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.599778 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-849c54f845-5sz8f"] Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.605089 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-849c54f845-5sz8f"] Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.612172 4792 scope.go:117] "RemoveContainer" containerID="f1137361384e9d40435e92535d0e4bc6acb2ef993241edff4b6c359b02ec40ec" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.612595 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6"] Jan 21 18:05:35 crc kubenswrapper[4792]: E0121 18:05:35.612803 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1137361384e9d40435e92535d0e4bc6acb2ef993241edff4b6c359b02ec40ec\": container with ID starting with f1137361384e9d40435e92535d0e4bc6acb2ef993241edff4b6c359b02ec40ec not found: ID does not exist" containerID="f1137361384e9d40435e92535d0e4bc6acb2ef993241edff4b6c359b02ec40ec" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.612870 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1137361384e9d40435e92535d0e4bc6acb2ef993241edff4b6c359b02ec40ec"} err="failed to get container status \"f1137361384e9d40435e92535d0e4bc6acb2ef993241edff4b6c359b02ec40ec\": rpc error: code = NotFound desc = could not find container \"f1137361384e9d40435e92535d0e4bc6acb2ef993241edff4b6c359b02ec40ec\": container with ID starting with f1137361384e9d40435e92535d0e4bc6acb2ef993241edff4b6c359b02ec40ec not found: ID does not exist" Jan 21 18:05:35 crc kubenswrapper[4792]: I0121 18:05:35.616779 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7887cfcf8b-wjsq6"] Jan 21 
18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.138798 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b9c779c94-brjls"] Jan 21 18:05:36 crc kubenswrapper[4792]: E0121 18:05:36.139827 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1c8cf15-d53a-41c4-9bc0-6e97d3084cad" containerName="route-controller-manager" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.139916 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1c8cf15-d53a-41c4-9bc0-6e97d3084cad" containerName="route-controller-manager" Jan 21 18:05:36 crc kubenswrapper[4792]: E0121 18:05:36.139950 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15a12100-5704-4b1b-b9db-4961709b2587" containerName="registry" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.140010 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="15a12100-5704-4b1b-b9db-4961709b2587" containerName="registry" Jan 21 18:05:36 crc kubenswrapper[4792]: E0121 18:05:36.140038 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cc7b05b-6f8e-4d39-a855-a21df963d2ba" containerName="controller-manager" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.140112 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cc7b05b-6f8e-4d39-a855-a21df963d2ba" containerName="controller-manager" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.140484 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="15a12100-5704-4b1b-b9db-4961709b2587" containerName="registry" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.140526 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1c8cf15-d53a-41c4-9bc0-6e97d3084cad" containerName="route-controller-manager" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.140599 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cc7b05b-6f8e-4d39-a855-a21df963d2ba" containerName="controller-manager" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.144708 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5b9c779c94-brjls" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.146320 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-fc7df48c8-zxjdh"] Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.147381 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-fc7df48c8-zxjdh" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.148448 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.149793 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.150654 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.150891 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.151133 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.151286 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.151622 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.151754 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.151974 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-fc7df48c8-zxjdh"] Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.153047 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.153184 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.153685 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.154074 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.157833 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b9c779c94-brjls"] Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.165348 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.201331 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8b6374a-b677-46e4-ae68-0d4a9858e4c3-config\") pod \"route-controller-manager-5b9c779c94-brjls\" (UID: \"d8b6374a-b677-46e4-ae68-0d4a9858e4c3\") " pod="openshift-route-controller-manager/route-controller-manager-5b9c779c94-brjls" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.201378 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-9m2rt\" (UniqueName: \"kubernetes.io/projected/1d75e5b2-0e9f-418d-8111-e76535a94240-kube-api-access-9m2rt\") pod \"controller-manager-fc7df48c8-zxjdh\" (UID: \"1d75e5b2-0e9f-418d-8111-e76535a94240\") " pod="openshift-controller-manager/controller-manager-fc7df48c8-zxjdh" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.201409 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8b6374a-b677-46e4-ae68-0d4a9858e4c3-serving-cert\") pod \"route-controller-manager-5b9c779c94-brjls\" (UID: \"d8b6374a-b677-46e4-ae68-0d4a9858e4c3\") " pod="openshift-route-controller-manager/route-controller-manager-5b9c779c94-brjls" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.201472 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d75e5b2-0e9f-418d-8111-e76535a94240-serving-cert\") pod \"controller-manager-fc7df48c8-zxjdh\" (UID: \"1d75e5b2-0e9f-418d-8111-e76535a94240\") " pod="openshift-controller-manager/controller-manager-fc7df48c8-zxjdh" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.201496 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d75e5b2-0e9f-418d-8111-e76535a94240-config\") pod \"controller-manager-fc7df48c8-zxjdh\" (UID: \"1d75e5b2-0e9f-418d-8111-e76535a94240\") " pod="openshift-controller-manager/controller-manager-fc7df48c8-zxjdh" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.201533 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d8b6374a-b677-46e4-ae68-0d4a9858e4c3-client-ca\") pod \"route-controller-manager-5b9c779c94-brjls\" (UID: \"d8b6374a-b677-46e4-ae68-0d4a9858e4c3\") " pod="openshift-route-controller-manager/route-controller-manager-5b9c779c94-brjls" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.201570 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1d75e5b2-0e9f-418d-8111-e76535a94240-client-ca\") pod \"controller-manager-fc7df48c8-zxjdh\" (UID: \"1d75e5b2-0e9f-418d-8111-e76535a94240\") " pod="openshift-controller-manager/controller-manager-fc7df48c8-zxjdh" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.201686 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1d75e5b2-0e9f-418d-8111-e76535a94240-proxy-ca-bundles\") pod \"controller-manager-fc7df48c8-zxjdh\" (UID: \"1d75e5b2-0e9f-418d-8111-e76535a94240\") " pod="openshift-controller-manager/controller-manager-fc7df48c8-zxjdh" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.201796 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mctml\" (UniqueName: \"kubernetes.io/projected/d8b6374a-b677-46e4-ae68-0d4a9858e4c3-kube-api-access-mctml\") pod \"route-controller-manager-5b9c779c94-brjls\" (UID: \"d8b6374a-b677-46e4-ae68-0d4a9858e4c3\") " pod="openshift-route-controller-manager/route-controller-manager-5b9c779c94-brjls" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.255914 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="5cc7b05b-6f8e-4d39-a855-a21df963d2ba" path="/var/lib/kubelet/pods/5cc7b05b-6f8e-4d39-a855-a21df963d2ba/volumes" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.256716 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1c8cf15-d53a-41c4-9bc0-6e97d3084cad" path="/var/lib/kubelet/pods/d1c8cf15-d53a-41c4-9bc0-6e97d3084cad/volumes" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.303313 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8b6374a-b677-46e4-ae68-0d4a9858e4c3-config\") pod \"route-controller-manager-5b9c779c94-brjls\" (UID: \"d8b6374a-b677-46e4-ae68-0d4a9858e4c3\") " pod="openshift-route-controller-manager/route-controller-manager-5b9c779c94-brjls" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.303375 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9m2rt\" (UniqueName: \"kubernetes.io/projected/1d75e5b2-0e9f-418d-8111-e76535a94240-kube-api-access-9m2rt\") pod \"controller-manager-fc7df48c8-zxjdh\" (UID: \"1d75e5b2-0e9f-418d-8111-e76535a94240\") " pod="openshift-controller-manager/controller-manager-fc7df48c8-zxjdh" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.303403 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8b6374a-b677-46e4-ae68-0d4a9858e4c3-serving-cert\") pod \"route-controller-manager-5b9c779c94-brjls\" (UID: \"d8b6374a-b677-46e4-ae68-0d4a9858e4c3\") " pod="openshift-route-controller-manager/route-controller-manager-5b9c779c94-brjls" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.303436 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d75e5b2-0e9f-418d-8111-e76535a94240-serving-cert\") pod \"controller-manager-fc7df48c8-zxjdh\" (UID: \"1d75e5b2-0e9f-418d-8111-e76535a94240\") " pod="openshift-controller-manager/controller-manager-fc7df48c8-zxjdh" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.303493 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d75e5b2-0e9f-418d-8111-e76535a94240-config\") pod \"controller-manager-fc7df48c8-zxjdh\" (UID: \"1d75e5b2-0e9f-418d-8111-e76535a94240\") " pod="openshift-controller-manager/controller-manager-fc7df48c8-zxjdh" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.303580 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d8b6374a-b677-46e4-ae68-0d4a9858e4c3-client-ca\") pod \"route-controller-manager-5b9c779c94-brjls\" (UID: \"d8b6374a-b677-46e4-ae68-0d4a9858e4c3\") " pod="openshift-route-controller-manager/route-controller-manager-5b9c779c94-brjls" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.303628 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1d75e5b2-0e9f-418d-8111-e76535a94240-client-ca\") pod \"controller-manager-fc7df48c8-zxjdh\" (UID: \"1d75e5b2-0e9f-418d-8111-e76535a94240\") " pod="openshift-controller-manager/controller-manager-fc7df48c8-zxjdh" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.303657 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/1d75e5b2-0e9f-418d-8111-e76535a94240-proxy-ca-bundles\") pod \"controller-manager-fc7df48c8-zxjdh\" (UID: \"1d75e5b2-0e9f-418d-8111-e76535a94240\") " pod="openshift-controller-manager/controller-manager-fc7df48c8-zxjdh" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.303693 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mctml\" (UniqueName: \"kubernetes.io/projected/d8b6374a-b677-46e4-ae68-0d4a9858e4c3-kube-api-access-mctml\") pod \"route-controller-manager-5b9c779c94-brjls\" (UID: \"d8b6374a-b677-46e4-ae68-0d4a9858e4c3\") " pod="openshift-route-controller-manager/route-controller-manager-5b9c779c94-brjls" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.305339 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8b6374a-b677-46e4-ae68-0d4a9858e4c3-config\") pod \"route-controller-manager-5b9c779c94-brjls\" (UID: \"d8b6374a-b677-46e4-ae68-0d4a9858e4c3\") " pod="openshift-route-controller-manager/route-controller-manager-5b9c779c94-brjls" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.306870 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d8b6374a-b677-46e4-ae68-0d4a9858e4c3-client-ca\") pod \"route-controller-manager-5b9c779c94-brjls\" (UID: \"d8b6374a-b677-46e4-ae68-0d4a9858e4c3\") " pod="openshift-route-controller-manager/route-controller-manager-5b9c779c94-brjls" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.307076 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1d75e5b2-0e9f-418d-8111-e76535a94240-client-ca\") pod \"controller-manager-fc7df48c8-zxjdh\" (UID: \"1d75e5b2-0e9f-418d-8111-e76535a94240\") " pod="openshift-controller-manager/controller-manager-fc7df48c8-zxjdh" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.307144 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d75e5b2-0e9f-418d-8111-e76535a94240-config\") pod \"controller-manager-fc7df48c8-zxjdh\" (UID: \"1d75e5b2-0e9f-418d-8111-e76535a94240\") " pod="openshift-controller-manager/controller-manager-fc7df48c8-zxjdh" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.308661 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1d75e5b2-0e9f-418d-8111-e76535a94240-proxy-ca-bundles\") pod \"controller-manager-fc7df48c8-zxjdh\" (UID: \"1d75e5b2-0e9f-418d-8111-e76535a94240\") " pod="openshift-controller-manager/controller-manager-fc7df48c8-zxjdh" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.311090 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d75e5b2-0e9f-418d-8111-e76535a94240-serving-cert\") pod \"controller-manager-fc7df48c8-zxjdh\" (UID: \"1d75e5b2-0e9f-418d-8111-e76535a94240\") " pod="openshift-controller-manager/controller-manager-fc7df48c8-zxjdh" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.311678 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8b6374a-b677-46e4-ae68-0d4a9858e4c3-serving-cert\") pod \"route-controller-manager-5b9c779c94-brjls\" (UID: \"d8b6374a-b677-46e4-ae68-0d4a9858e4c3\") " 
pod="openshift-route-controller-manager/route-controller-manager-5b9c779c94-brjls" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.320514 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9m2rt\" (UniqueName: \"kubernetes.io/projected/1d75e5b2-0e9f-418d-8111-e76535a94240-kube-api-access-9m2rt\") pod \"controller-manager-fc7df48c8-zxjdh\" (UID: \"1d75e5b2-0e9f-418d-8111-e76535a94240\") " pod="openshift-controller-manager/controller-manager-fc7df48c8-zxjdh" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.331725 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mctml\" (UniqueName: \"kubernetes.io/projected/d8b6374a-b677-46e4-ae68-0d4a9858e4c3-kube-api-access-mctml\") pod \"route-controller-manager-5b9c779c94-brjls\" (UID: \"d8b6374a-b677-46e4-ae68-0d4a9858e4c3\") " pod="openshift-route-controller-manager/route-controller-manager-5b9c779c94-brjls" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.492032 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5b9c779c94-brjls" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.501626 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-fc7df48c8-zxjdh" Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.757234 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-fc7df48c8-zxjdh"] Jan 21 18:05:36 crc kubenswrapper[4792]: I0121 18:05:36.983179 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b9c779c94-brjls"] Jan 21 18:05:36 crc kubenswrapper[4792]: W0121 18:05:36.992093 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd8b6374a_b677_46e4_ae68_0d4a9858e4c3.slice/crio-91b17ed2089f907b716166b74b607276f7778a85813750b8eb32a792a5f81b98 WatchSource:0}: Error finding container 91b17ed2089f907b716166b74b607276f7778a85813750b8eb32a792a5f81b98: Status 404 returned error can't find the container with id 91b17ed2089f907b716166b74b607276f7778a85813750b8eb32a792a5f81b98 Jan 21 18:05:37 crc kubenswrapper[4792]: I0121 18:05:37.600936 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-fc7df48c8-zxjdh" event={"ID":"1d75e5b2-0e9f-418d-8111-e76535a94240","Type":"ContainerStarted","Data":"43af0e062f1772194817b2553cccaa0537e59b91d0b4a98714604fa6f9d93f18"} Jan 21 18:05:37 crc kubenswrapper[4792]: I0121 18:05:37.601543 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-fc7df48c8-zxjdh" event={"ID":"1d75e5b2-0e9f-418d-8111-e76535a94240","Type":"ContainerStarted","Data":"b588675e0ca83e04724821fb2f4e1a63c4db7ba300c2c9e3c8c79453503c0c14"} Jan 21 18:05:37 crc kubenswrapper[4792]: I0121 18:05:37.601783 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-fc7df48c8-zxjdh" Jan 21 18:05:37 crc kubenswrapper[4792]: I0121 18:05:37.604660 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5b9c779c94-brjls" event={"ID":"d8b6374a-b677-46e4-ae68-0d4a9858e4c3","Type":"ContainerStarted","Data":"1c0f8ae69a389ab23782c9c4dcd8860811b2b38ef7482296c4afe17d9a24d118"} Jan 
21 18:05:37 crc kubenswrapper[4792]: I0121 18:05:37.604727 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5b9c779c94-brjls" event={"ID":"d8b6374a-b677-46e4-ae68-0d4a9858e4c3","Type":"ContainerStarted","Data":"91b17ed2089f907b716166b74b607276f7778a85813750b8eb32a792a5f81b98"} Jan 21 18:05:37 crc kubenswrapper[4792]: I0121 18:05:37.605121 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5b9c779c94-brjls" Jan 21 18:05:37 crc kubenswrapper[4792]: I0121 18:05:37.607975 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-fc7df48c8-zxjdh" Jan 21 18:05:37 crc kubenswrapper[4792]: I0121 18:05:37.625425 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-fc7df48c8-zxjdh" podStartSLOduration=3.625403043 podStartE2EDuration="3.625403043s" podCreationTimestamp="2026-01-21 18:05:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:05:37.621214509 +0000 UTC m=+571.603177705" watchObservedRunningTime="2026-01-21 18:05:37.625403043 +0000 UTC m=+571.607366229" Jan 21 18:05:37 crc kubenswrapper[4792]: I0121 18:05:37.653721 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5b9c779c94-brjls" podStartSLOduration=3.653698043 podStartE2EDuration="3.653698043s" podCreationTimestamp="2026-01-21 18:05:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:05:37.64669626 +0000 UTC m=+571.628659456" watchObservedRunningTime="2026-01-21 18:05:37.653698043 +0000 UTC m=+571.635661219" Jan 21 18:05:37 crc kubenswrapper[4792]: I0121 18:05:37.762225 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5b9c779c94-brjls" Jan 21 18:05:53 crc kubenswrapper[4792]: I0121 18:05:53.570512 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:05:53 crc kubenswrapper[4792]: I0121 18:05:53.571171 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:06:23 crc kubenswrapper[4792]: I0121 18:06:23.571383 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:06:23 crc kubenswrapper[4792]: I0121 18:06:23.572198 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:06:53 crc kubenswrapper[4792]: I0121 18:06:53.570402 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:06:53 crc kubenswrapper[4792]: I0121 18:06:53.571034 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:06:53 crc kubenswrapper[4792]: I0121 18:06:53.571107 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" Jan 21 18:06:53 crc kubenswrapper[4792]: I0121 18:06:53.571899 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4c628e700d7806ef45f8526a77baeb62e6d9e6f405ad08f7dde704d18179d214"} pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 18:06:53 crc kubenswrapper[4792]: I0121 18:06:53.572011 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" containerID="cri-o://4c628e700d7806ef45f8526a77baeb62e6d9e6f405ad08f7dde704d18179d214" gracePeriod=600 Jan 21 18:06:53 crc kubenswrapper[4792]: E0121 18:06:53.688136 4792 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod759f2e21_e44e_4049_b262_cb49448e22ab.slice/crio-4c628e700d7806ef45f8526a77baeb62e6d9e6f405ad08f7dde704d18179d214.scope\": RecentStats: unable to find data in memory cache]" Jan 21 18:06:54 crc kubenswrapper[4792]: I0121 18:06:54.052116 4792 generic.go:334] "Generic (PLEG): container finished" podID="759f2e21-e44e-4049-b262-cb49448e22ab" containerID="4c628e700d7806ef45f8526a77baeb62e6d9e6f405ad08f7dde704d18179d214" exitCode=0 Jan 21 18:06:54 crc kubenswrapper[4792]: I0121 18:06:54.052180 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerDied","Data":"4c628e700d7806ef45f8526a77baeb62e6d9e6f405ad08f7dde704d18179d214"} Jan 21 18:06:54 crc kubenswrapper[4792]: I0121 18:06:54.052223 4792 scope.go:117] "RemoveContainer" containerID="257c9e74cbb9b9f92ff3f280789fa1e4b06398d7e5151781ef0a5144cab14c51" Jan 21 18:06:55 crc kubenswrapper[4792]: I0121 18:06:55.059207 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerStarted","Data":"db9d47c638475d5f4679b8511694d303d4101605ef33cf9eed866432ca6cd998"} Jan 21 18:07:06 crc kubenswrapper[4792]: I0121 18:07:06.373400 4792 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-bxqhq"] Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.409251 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" podUID="7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1" containerName="oauth-openshift" containerID="cri-o://7821e92844ddc4e01c36badad320617a5d002140ae36944a246428d74eb28fc5" gracePeriod=15 Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.762628 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.796296 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-98d6fc98d-zjmj5"] Jan 21 18:07:31 crc kubenswrapper[4792]: E0121 18:07:31.796620 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1" containerName="oauth-openshift" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.796639 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1" containerName="oauth-openshift" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.796805 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1" containerName="oauth-openshift" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.797438 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.812934 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-98d6fc98d-zjmj5"] Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.875580 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-trusted-ca-bundle\") pod \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.875693 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-idp-0-file-data\") pod \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.875716 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-audit-policies\") pod \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.875754 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-serving-cert\") pod \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.875780 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" 
(UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-router-certs\") pod \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.875802 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvfks\" (UniqueName: \"kubernetes.io/projected/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-kube-api-access-jvfks\") pod \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.875822 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-ocp-branding-template\") pod \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.875889 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-cliconfig\") pod \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.875908 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-audit-dir\") pod \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.875943 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-template-login\") pod \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.875965 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-service-ca\") pod \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.875986 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-template-provider-selection\") pod \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.876039 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-session\") pod \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.876063 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-template-error\") pod 
\"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\" (UID: \"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1\") " Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.876200 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-system-router-certs\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.876224 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.876243 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/19a858c1-bb81-48e9-acd6-fa67204bbdf2-audit-dir\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.876260 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.876296 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/19a858c1-bb81-48e9-acd6-fa67204bbdf2-audit-policies\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.876320 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.876345 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.876365 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.876392 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-user-template-error\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.876423 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-user-template-login\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.876449 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2zs5\" (UniqueName: \"kubernetes.io/projected/19a858c1-bb81-48e9-acd6-fa67204bbdf2-kube-api-access-w2zs5\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.876467 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-system-service-ca\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.876487 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.876505 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-system-session\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.877454 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1" (UID: "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.877431 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1" (UID: "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.877520 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1" (UID: "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.877657 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1" (UID: "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.878160 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1" (UID: "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.885584 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1" (UID: "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.886054 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1" (UID: "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.886571 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1" (UID: "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.886611 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-kube-api-access-jvfks" (OuterVolumeSpecName: "kube-api-access-jvfks") pod "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1" (UID: "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1"). InnerVolumeSpecName "kube-api-access-jvfks". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.887022 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1" (UID: "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.888160 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1" (UID: "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.888475 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1" (UID: "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.889150 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1" (UID: "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.890391 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1" (UID: "7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.977894 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2zs5\" (UniqueName: \"kubernetes.io/projected/19a858c1-bb81-48e9-acd6-fa67204bbdf2-kube-api-access-w2zs5\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.978281 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-system-service-ca\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.979072 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.979139 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-system-session\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.979221 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-system-router-certs\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.979253 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.979293 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.979318 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/19a858c1-bb81-48e9-acd6-fa67204bbdf2-audit-dir\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 
18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.979398 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/19a858c1-bb81-48e9-acd6-fa67204bbdf2-audit-policies\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.979435 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.979527 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.979572 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.979660 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-user-template-error\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.979745 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-user-template-login\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.979837 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.979877 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.979889 4792 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:31 crc 
kubenswrapper[4792]: I0121 18:07:31.979902 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.979913 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.979925 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvfks\" (UniqueName: \"kubernetes.io/projected/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-kube-api-access-jvfks\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.979937 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.979952 4792 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.979964 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.979982 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.980002 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.980006 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.980016 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.980016 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-system-service-ca\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc 
kubenswrapper[4792]: I0121 18:07:31.980044 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.980084 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.980343 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/19a858c1-bb81-48e9-acd6-fa67204bbdf2-audit-dir\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.981074 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/19a858c1-bb81-48e9-acd6-fa67204bbdf2-audit-policies\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.981086 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.984323 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-user-template-error\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.984602 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-system-router-certs\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.984718 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.984832 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-system-session\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc 
kubenswrapper[4792]: I0121 18:07:31.984881 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-user-template-login\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.985422 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.985878 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.986206 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/19a858c1-bb81-48e9-acd6-fa67204bbdf2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:31 crc kubenswrapper[4792]: I0121 18:07:31.996270 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2zs5\" (UniqueName: \"kubernetes.io/projected/19a858c1-bb81-48e9-acd6-fa67204bbdf2-kube-api-access-w2zs5\") pod \"oauth-openshift-98d6fc98d-zjmj5\" (UID: \"19a858c1-bb81-48e9-acd6-fa67204bbdf2\") " pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:32 crc kubenswrapper[4792]: I0121 18:07:32.118721 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:32 crc kubenswrapper[4792]: I0121 18:07:32.305759 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" Jan 21 18:07:32 crc kubenswrapper[4792]: I0121 18:07:32.305798 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" event={"ID":"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1","Type":"ContainerDied","Data":"7821e92844ddc4e01c36badad320617a5d002140ae36944a246428d74eb28fc5"} Jan 21 18:07:32 crc kubenswrapper[4792]: I0121 18:07:32.306717 4792 scope.go:117] "RemoveContainer" containerID="7821e92844ddc4e01c36badad320617a5d002140ae36944a246428d74eb28fc5" Jan 21 18:07:32 crc kubenswrapper[4792]: I0121 18:07:32.308419 4792 generic.go:334] "Generic (PLEG): container finished" podID="7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1" containerID="7821e92844ddc4e01c36badad320617a5d002140ae36944a246428d74eb28fc5" exitCode=0 Jan 21 18:07:32 crc kubenswrapper[4792]: I0121 18:07:32.308500 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-bxqhq" event={"ID":"7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1","Type":"ContainerDied","Data":"f97cd0d37a983c51ab543cfdbeae52146425749ad7709041f9d35de23dfebdee"} Jan 21 18:07:32 crc kubenswrapper[4792]: I0121 18:07:32.336005 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-bxqhq"] Jan 21 18:07:32 crc kubenswrapper[4792]: I0121 18:07:32.337022 4792 scope.go:117] "RemoveContainer" containerID="7821e92844ddc4e01c36badad320617a5d002140ae36944a246428d74eb28fc5" Jan 21 18:07:32 crc kubenswrapper[4792]: E0121 18:07:32.337473 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7821e92844ddc4e01c36badad320617a5d002140ae36944a246428d74eb28fc5\": container with ID starting with 7821e92844ddc4e01c36badad320617a5d002140ae36944a246428d74eb28fc5 not found: ID does not exist" containerID="7821e92844ddc4e01c36badad320617a5d002140ae36944a246428d74eb28fc5" Jan 21 18:07:32 crc kubenswrapper[4792]: I0121 18:07:32.337511 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7821e92844ddc4e01c36badad320617a5d002140ae36944a246428d74eb28fc5"} err="failed to get container status \"7821e92844ddc4e01c36badad320617a5d002140ae36944a246428d74eb28fc5\": rpc error: code = NotFound desc = could not find container \"7821e92844ddc4e01c36badad320617a5d002140ae36944a246428d74eb28fc5\": container with ID starting with 7821e92844ddc4e01c36badad320617a5d002140ae36944a246428d74eb28fc5 not found: ID does not exist" Jan 21 18:07:32 crc kubenswrapper[4792]: I0121 18:07:32.339697 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-bxqhq"] Jan 21 18:07:32 crc kubenswrapper[4792]: I0121 18:07:32.402155 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-98d6fc98d-zjmj5"] Jan 21 18:07:33 crc kubenswrapper[4792]: I0121 18:07:33.317626 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" event={"ID":"19a858c1-bb81-48e9-acd6-fa67204bbdf2","Type":"ContainerStarted","Data":"ffe1ffed3c443741ea2f2a53fa39dcf130240f546c00dbcbca93c6d0dbe07e07"} Jan 21 18:07:33 crc kubenswrapper[4792]: I0121 18:07:33.318221 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:33 crc kubenswrapper[4792]: I0121 
18:07:33.318238 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" event={"ID":"19a858c1-bb81-48e9-acd6-fa67204bbdf2","Type":"ContainerStarted","Data":"1aaeb4cbb5aacd0d76c17fdc11b44aca50014c97e80a2132055923d5e4a7b595"} Jan 21 18:07:33 crc kubenswrapper[4792]: I0121 18:07:33.336979 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" Jan 21 18:07:33 crc kubenswrapper[4792]: I0121 18:07:33.352653 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-98d6fc98d-zjmj5" podStartSLOduration=27.352612468 podStartE2EDuration="27.352612468s" podCreationTimestamp="2026-01-21 18:07:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:07:33.347682783 +0000 UTC m=+687.329645969" watchObservedRunningTime="2026-01-21 18:07:33.352612468 +0000 UTC m=+687.334575654" Jan 21 18:07:34 crc kubenswrapper[4792]: I0121 18:07:34.254585 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1" path="/var/lib/kubelet/pods/7d6d1133-dec7-4cec-bfcc-ea983d2a8bf1/volumes" Jan 21 18:08:07 crc kubenswrapper[4792]: I0121 18:08:07.365172 4792 scope.go:117] "RemoveContainer" containerID="b6584bed35fdc8e912f8a486a70ff330f77dd4411792d788dff8f09e34010af9" Jan 21 18:08:07 crc kubenswrapper[4792]: I0121 18:08:07.392751 4792 scope.go:117] "RemoveContainer" containerID="097ce796343fcfa6a82768d22ef8e67f82cd3ea746c910a1f9fc14b43421a07f" Jan 21 18:09:03 crc kubenswrapper[4792]: I0121 18:09:03.232974 4792 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 21 18:09:23 crc kubenswrapper[4792]: I0121 18:09:23.571075 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:09:23 crc kubenswrapper[4792]: I0121 18:09:23.571822 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:09:53 crc kubenswrapper[4792]: I0121 18:09:53.570207 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:09:53 crc kubenswrapper[4792]: I0121 18:09:53.571892 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:09:59 crc kubenswrapper[4792]: I0121 18:09:59.538460 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-8t4xq"] Jan 21 18:09:59 
crc kubenswrapper[4792]: I0121 18:09:59.539580 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovn-controller" containerID="cri-o://5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243" gracePeriod=30 Jan 21 18:09:59 crc kubenswrapper[4792]: I0121 18:09:59.539668 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="nbdb" containerID="cri-o://26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de" gracePeriod=30 Jan 21 18:09:59 crc kubenswrapper[4792]: I0121 18:09:59.539761 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="northd" containerID="cri-o://dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b" gracePeriod=30 Jan 21 18:09:59 crc kubenswrapper[4792]: I0121 18:09:59.539763 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="kube-rbac-proxy-node" containerID="cri-o://c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb" gracePeriod=30 Jan 21 18:09:59 crc kubenswrapper[4792]: I0121 18:09:59.539780 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovn-acl-logging" containerID="cri-o://67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522" gracePeriod=30 Jan 21 18:09:59 crc kubenswrapper[4792]: I0121 18:09:59.539696 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd" gracePeriod=30 Jan 21 18:09:59 crc kubenswrapper[4792]: I0121 18:09:59.540053 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="sbdb" containerID="cri-o://34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26" gracePeriod=30 Jan 21 18:09:59 crc kubenswrapper[4792]: I0121 18:09:59.587514 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovnkube-controller" containerID="cri-o://737adb699317b3944466c13490578c30fb1889fc1dc4298b2cf8ae612c8c94c9" gracePeriod=30 Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.221192 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8t4xq_10549a02-b482-4bc0-a770-65dbb57f340a/ovnkube-controller/3.log" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.224529 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8t4xq_10549a02-b482-4bc0-a770-65dbb57f340a/ovn-acl-logging/0.log" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.225369 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8t4xq_10549a02-b482-4bc0-a770-65dbb57f340a/ovn-controller/0.log" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.226233 4792 generic.go:334] "Generic (PLEG): container finished" podID="10549a02-b482-4bc0-a770-65dbb57f340a" containerID="737adb699317b3944466c13490578c30fb1889fc1dc4298b2cf8ae612c8c94c9" exitCode=0 Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.226261 4792 generic.go:334] "Generic (PLEG): container finished" podID="10549a02-b482-4bc0-a770-65dbb57f340a" containerID="34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26" exitCode=0 Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.226288 4792 generic.go:334] "Generic (PLEG): container finished" podID="10549a02-b482-4bc0-a770-65dbb57f340a" containerID="26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de" exitCode=0 Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.226299 4792 generic.go:334] "Generic (PLEG): container finished" podID="10549a02-b482-4bc0-a770-65dbb57f340a" containerID="dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b" exitCode=0 Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.226308 4792 generic.go:334] "Generic (PLEG): container finished" podID="10549a02-b482-4bc0-a770-65dbb57f340a" containerID="7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd" exitCode=0 Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.226318 4792 generic.go:334] "Generic (PLEG): container finished" podID="10549a02-b482-4bc0-a770-65dbb57f340a" containerID="c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb" exitCode=0 Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.226326 4792 generic.go:334] "Generic (PLEG): container finished" podID="10549a02-b482-4bc0-a770-65dbb57f340a" containerID="67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522" exitCode=143 Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.226334 4792 generic.go:334] "Generic (PLEG): container finished" podID="10549a02-b482-4bc0-a770-65dbb57f340a" containerID="5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243" exitCode=143 Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.226308 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerDied","Data":"737adb699317b3944466c13490578c30fb1889fc1dc4298b2cf8ae612c8c94c9"} Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.226459 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerDied","Data":"34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26"} Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.226478 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerDied","Data":"26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de"} Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.226490 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerDied","Data":"dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b"} Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.226514 4792 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerDied","Data":"7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd"} Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.226525 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerDied","Data":"c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb"} Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.226536 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerDied","Data":"67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522"} Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.226548 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerDied","Data":"5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243"} Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.226569 4792 scope.go:117] "RemoveContainer" containerID="5754c1d5d813fa2c06641540f162fab4e392271c5f7e4ff6406221b0a107bea3" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.229064 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tvdgr_129c7cf1-6a9e-440a-8d4e-049c0652cf6e/kube-multus/2.log" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.230108 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tvdgr_129c7cf1-6a9e-440a-8d4e-049c0652cf6e/kube-multus/1.log" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.230160 4792 generic.go:334] "Generic (PLEG): container finished" podID="129c7cf1-6a9e-440a-8d4e-049c0652cf6e" containerID="da9feb3c04286ecb6efb1df895e99a5d49f616f2aff0c41da9f572606de171ae" exitCode=2 Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.230178 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-tvdgr" event={"ID":"129c7cf1-6a9e-440a-8d4e-049c0652cf6e","Type":"ContainerDied","Data":"da9feb3c04286ecb6efb1df895e99a5d49f616f2aff0c41da9f572606de171ae"} Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.230790 4792 scope.go:117] "RemoveContainer" containerID="da9feb3c04286ecb6efb1df895e99a5d49f616f2aff0c41da9f572606de171ae" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.271693 4792 scope.go:117] "RemoveContainer" containerID="8e20f1abd9dbc3d916becd3d87decba7d7ee0645031748385d7ab8eeb1b5ae74" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.395298 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8t4xq_10549a02-b482-4bc0-a770-65dbb57f340a/ovn-acl-logging/0.log" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.396261 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8t4xq_10549a02-b482-4bc0-a770-65dbb57f340a/ovn-controller/0.log" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.396970 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.457060 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-bqfth"] Jan 21 18:10:00 crc kubenswrapper[4792]: E0121 18:10:00.458252 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovnkube-controller" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458275 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovnkube-controller" Jan 21 18:10:00 crc kubenswrapper[4792]: E0121 18:10:00.458283 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovnkube-controller" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458289 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovnkube-controller" Jan 21 18:10:00 crc kubenswrapper[4792]: E0121 18:10:00.458297 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovnkube-controller" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458304 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovnkube-controller" Jan 21 18:10:00 crc kubenswrapper[4792]: E0121 18:10:00.458314 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="kubecfg-setup" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458320 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="kubecfg-setup" Jan 21 18:10:00 crc kubenswrapper[4792]: E0121 18:10:00.458328 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovn-controller" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458334 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovn-controller" Jan 21 18:10:00 crc kubenswrapper[4792]: E0121 18:10:00.458339 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="northd" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458345 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="northd" Jan 21 18:10:00 crc kubenswrapper[4792]: E0121 18:10:00.458354 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="kube-rbac-proxy-ovn-metrics" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458359 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="kube-rbac-proxy-ovn-metrics" Jan 21 18:10:00 crc kubenswrapper[4792]: E0121 18:10:00.458368 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="kube-rbac-proxy-node" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458374 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="kube-rbac-proxy-node" Jan 21 18:10:00 crc kubenswrapper[4792]: E0121 18:10:00.458383 4792 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="sbdb" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458389 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="sbdb" Jan 21 18:10:00 crc kubenswrapper[4792]: E0121 18:10:00.458413 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="nbdb" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458418 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="nbdb" Jan 21 18:10:00 crc kubenswrapper[4792]: E0121 18:10:00.458426 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovn-acl-logging" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458432 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovn-acl-logging" Jan 21 18:10:00 crc kubenswrapper[4792]: E0121 18:10:00.458442 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovnkube-controller" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458447 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovnkube-controller" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458535 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="northd" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458545 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="kube-rbac-proxy-ovn-metrics" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458554 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovnkube-controller" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458561 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovnkube-controller" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458567 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="nbdb" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458575 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovn-controller" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458583 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="sbdb" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458589 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovnkube-controller" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458596 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovn-acl-logging" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458604 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovnkube-controller" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458613 4792 
memory_manager.go:354] "RemoveStaleState removing state" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="kube-rbac-proxy-node" Jan 21 18:10:00 crc kubenswrapper[4792]: E0121 18:10:00.458701 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovnkube-controller" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458708 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovnkube-controller" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.458790 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" containerName="ovnkube-controller" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.460838 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.563347 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-run-ovn-kubernetes\") pod \"10549a02-b482-4bc0-a770-65dbb57f340a\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.563771 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "10549a02-b482-4bc0-a770-65dbb57f340a" (UID: "10549a02-b482-4bc0-a770-65dbb57f340a"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.563950 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-run-ovn\") pod \"10549a02-b482-4bc0-a770-65dbb57f340a\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.563974 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "10549a02-b482-4bc0-a770-65dbb57f340a" (UID: "10549a02-b482-4bc0-a770-65dbb57f340a"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564236 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-cni-bin\") pod \"10549a02-b482-4bc0-a770-65dbb57f340a\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564289 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/10549a02-b482-4bc0-a770-65dbb57f340a-ovnkube-config\") pod \"10549a02-b482-4bc0-a770-65dbb57f340a\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564336 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-slash\") pod \"10549a02-b482-4bc0-a770-65dbb57f340a\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564359 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/10549a02-b482-4bc0-a770-65dbb57f340a-env-overrides\") pod \"10549a02-b482-4bc0-a770-65dbb57f340a\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564374 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-systemd-units\") pod \"10549a02-b482-4bc0-a770-65dbb57f340a\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564413 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-cni-netd\") pod \"10549a02-b482-4bc0-a770-65dbb57f340a\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564431 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-run-netns\") pod \"10549a02-b482-4bc0-a770-65dbb57f340a\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564455 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-etc-openvswitch\") pod \"10549a02-b482-4bc0-a770-65dbb57f340a\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564487 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/10549a02-b482-4bc0-a770-65dbb57f340a-ovnkube-script-lib\") pod \"10549a02-b482-4bc0-a770-65dbb57f340a\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564517 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-kubelet\") pod \"10549a02-b482-4bc0-a770-65dbb57f340a\" (UID: 
\"10549a02-b482-4bc0-a770-65dbb57f340a\") " Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564563 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-node-log\") pod \"10549a02-b482-4bc0-a770-65dbb57f340a\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564578 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-run-openvswitch\") pod \"10549a02-b482-4bc0-a770-65dbb57f340a\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564602 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-log-socket\") pod \"10549a02-b482-4bc0-a770-65dbb57f340a\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564647 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"10549a02-b482-4bc0-a770-65dbb57f340a\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564677 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnmzx\" (UniqueName: \"kubernetes.io/projected/10549a02-b482-4bc0-a770-65dbb57f340a-kube-api-access-rnmzx\") pod \"10549a02-b482-4bc0-a770-65dbb57f340a\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564695 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-run-systemd\") pod \"10549a02-b482-4bc0-a770-65dbb57f340a\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564721 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/10549a02-b482-4bc0-a770-65dbb57f340a-ovn-node-metrics-cert\") pod \"10549a02-b482-4bc0-a770-65dbb57f340a\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564737 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-var-lib-openvswitch\") pod \"10549a02-b482-4bc0-a770-65dbb57f340a\" (UID: \"10549a02-b482-4bc0-a770-65dbb57f340a\") " Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564954 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-host-run-netns\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564981 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-host-cni-bin\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564337 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "10549a02-b482-4bc0-a770-65dbb57f340a" (UID: "10549a02-b482-4bc0-a770-65dbb57f340a"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564705 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10549a02-b482-4bc0-a770-65dbb57f340a-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "10549a02-b482-4bc0-a770-65dbb57f340a" (UID: "10549a02-b482-4bc0-a770-65dbb57f340a"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565001 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/73db8af8-90ee-447c-b25f-919753d6cb80-ovnkube-script-lib\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565025 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-host-slash\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565050 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-host-run-ovn-kubernetes\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565068 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-systemd-units\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565088 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-run-ovn\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565107 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2k58g\" (UniqueName: \"kubernetes.io/projected/73db8af8-90ee-447c-b25f-919753d6cb80-kube-api-access-2k58g\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc 
kubenswrapper[4792]: I0121 18:10:00.565124 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-run-openvswitch\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565148 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/73db8af8-90ee-447c-b25f-919753d6cb80-env-overrides\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565165 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-host-kubelet\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565185 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-run-systemd\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565211 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-log-socket\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565229 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/73db8af8-90ee-447c-b25f-919753d6cb80-ovn-node-metrics-cert\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565284 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-etc-openvswitch\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564964 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10549a02-b482-4bc0-a770-65dbb57f340a-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "10549a02-b482-4bc0-a770-65dbb57f340a" (UID: "10549a02-b482-4bc0-a770-65dbb57f340a"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565382 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "10549a02-b482-4bc0-a770-65dbb57f340a" (UID: "10549a02-b482-4bc0-a770-65dbb57f340a"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565320 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565454 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-var-lib-openvswitch\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.564985 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-slash" (OuterVolumeSpecName: "host-slash") pod "10549a02-b482-4bc0-a770-65dbb57f340a" (UID: "10549a02-b482-4bc0-a770-65dbb57f340a"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565227 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10549a02-b482-4bc0-a770-65dbb57f340a-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "10549a02-b482-4bc0-a770-65dbb57f340a" (UID: "10549a02-b482-4bc0-a770-65dbb57f340a"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565252 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "10549a02-b482-4bc0-a770-65dbb57f340a" (UID: "10549a02-b482-4bc0-a770-65dbb57f340a"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565272 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "10549a02-b482-4bc0-a770-65dbb57f340a" (UID: "10549a02-b482-4bc0-a770-65dbb57f340a"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565291 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "10549a02-b482-4bc0-a770-65dbb57f340a" (UID: "10549a02-b482-4bc0-a770-65dbb57f340a"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565309 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "10549a02-b482-4bc0-a770-65dbb57f340a" (UID: "10549a02-b482-4bc0-a770-65dbb57f340a"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565646 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-node-log\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565691 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-host-cni-netd\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565819 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/73db8af8-90ee-447c-b25f-919753d6cb80-ovnkube-config\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565907 4792 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-cni-netd\") on node \"crc\" DevicePath \"\"" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565922 4792 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-run-netns\") on node \"crc\" DevicePath \"\"" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565934 4792 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565951 4792 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/10549a02-b482-4bc0-a770-65dbb57f340a-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565968 4792 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565981 4792 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.565994 4792 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.566006 4792 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-cni-bin\") on node \"crc\" DevicePath \"\"" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.566017 4792 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/10549a02-b482-4bc0-a770-65dbb57f340a-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.566030 4792 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-slash\") on node \"crc\" DevicePath \"\"" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.566041 4792 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/10549a02-b482-4bc0-a770-65dbb57f340a-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.566051 4792 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-systemd-units\") on node \"crc\" DevicePath \"\"" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.566093 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-log-socket" (OuterVolumeSpecName: "log-socket") pod "10549a02-b482-4bc0-a770-65dbb57f340a" (UID: "10549a02-b482-4bc0-a770-65dbb57f340a"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.566120 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "10549a02-b482-4bc0-a770-65dbb57f340a" (UID: "10549a02-b482-4bc0-a770-65dbb57f340a"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.566144 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-node-log" (OuterVolumeSpecName: "node-log") pod "10549a02-b482-4bc0-a770-65dbb57f340a" (UID: "10549a02-b482-4bc0-a770-65dbb57f340a"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.566188 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "10549a02-b482-4bc0-a770-65dbb57f340a" (UID: "10549a02-b482-4bc0-a770-65dbb57f340a"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.566632 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "10549a02-b482-4bc0-a770-65dbb57f340a" (UID: "10549a02-b482-4bc0-a770-65dbb57f340a"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.572403 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10549a02-b482-4bc0-a770-65dbb57f340a-kube-api-access-rnmzx" (OuterVolumeSpecName: "kube-api-access-rnmzx") pod "10549a02-b482-4bc0-a770-65dbb57f340a" (UID: "10549a02-b482-4bc0-a770-65dbb57f340a"). InnerVolumeSpecName "kube-api-access-rnmzx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.574406 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10549a02-b482-4bc0-a770-65dbb57f340a-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "10549a02-b482-4bc0-a770-65dbb57f340a" (UID: "10549a02-b482-4bc0-a770-65dbb57f340a"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.580708 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "10549a02-b482-4bc0-a770-65dbb57f340a" (UID: "10549a02-b482-4bc0-a770-65dbb57f340a"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.669552 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/73db8af8-90ee-447c-b25f-919753d6cb80-ovnkube-config\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.669649 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-host-run-netns\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.669693 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-host-cni-bin\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.669725 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/73db8af8-90ee-447c-b25f-919753d6cb80-ovnkube-script-lib\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.669755 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-host-slash\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.669780 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-host-run-ovn-kubernetes\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.669804 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-systemd-units\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.669804 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-host-run-netns\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.669957 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-run-ovn\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 
18:10:00.669995 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2k58g\" (UniqueName: \"kubernetes.io/projected/73db8af8-90ee-447c-b25f-919753d6cb80-kube-api-access-2k58g\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670024 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-run-openvswitch\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670069 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/73db8af8-90ee-447c-b25f-919753d6cb80-env-overrides\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670100 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-host-kubelet\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670127 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-run-systemd\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670189 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-log-socket\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670271 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/73db8af8-90ee-447c-b25f-919753d6cb80-ovn-node-metrics-cert\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670305 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-etc-openvswitch\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670388 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670455 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-var-lib-openvswitch\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670482 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/73db8af8-90ee-447c-b25f-919753d6cb80-ovnkube-config\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670538 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-node-log\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670581 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-host-cni-netd\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670647 4792 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-kubelet\") on node \"crc\" DevicePath \"\"" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670680 4792 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-run-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670723 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-host-cni-bin\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670764 4792 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-node-log\") on node \"crc\" DevicePath \"\"" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670772 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-etc-openvswitch\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670817 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-host-kubelet\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670822 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" 
(UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670864 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-run-systemd\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670875 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/73db8af8-90ee-447c-b25f-919753d6cb80-env-overrides\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670885 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-var-lib-openvswitch\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670910 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-log-socket\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670647 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/73db8af8-90ee-447c-b25f-919753d6cb80-ovnkube-script-lib\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670949 4792 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-log-socket\") on node \"crc\" DevicePath \"\"" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670951 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-node-log\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670980 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-host-run-ovn-kubernetes\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.670995 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-host-slash\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc 
kubenswrapper[4792]: I0121 18:10:00.670982 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-run-openvswitch\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.671029 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-host-cni-netd\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.671049 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-systemd-units\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.671074 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/73db8af8-90ee-447c-b25f-919753d6cb80-run-ovn\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.671666 4792 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.671693 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnmzx\" (UniqueName: \"kubernetes.io/projected/10549a02-b482-4bc0-a770-65dbb57f340a-kube-api-access-rnmzx\") on node \"crc\" DevicePath \"\"" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.671706 4792 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/10549a02-b482-4bc0-a770-65dbb57f340a-run-systemd\") on node \"crc\" DevicePath \"\"" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.671718 4792 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/10549a02-b482-4bc0-a770-65dbb57f340a-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.674504 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/73db8af8-90ee-447c-b25f-919753d6cb80-ovn-node-metrics-cert\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.691012 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2k58g\" (UniqueName: \"kubernetes.io/projected/73db8af8-90ee-447c-b25f-919753d6cb80-kube-api-access-2k58g\") pod \"ovnkube-node-bqfth\" (UID: \"73db8af8-90ee-447c-b25f-919753d6cb80\") " pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: I0121 18:10:00.775150 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:00 crc kubenswrapper[4792]: W0121 18:10:00.800939 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod73db8af8_90ee_447c_b25f_919753d6cb80.slice/crio-5e194eda9e40cdf41e30ab633e4bd8111b391d107a48cfbd9701ba83fe2332c1 WatchSource:0}: Error finding container 5e194eda9e40cdf41e30ab633e4bd8111b391d107a48cfbd9701ba83fe2332c1: Status 404 returned error can't find the container with id 5e194eda9e40cdf41e30ab633e4bd8111b391d107a48cfbd9701ba83fe2332c1 Jan 21 18:10:01 crc kubenswrapper[4792]: I0121 18:10:01.238515 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tvdgr_129c7cf1-6a9e-440a-8d4e-049c0652cf6e/kube-multus/2.log" Jan 21 18:10:01 crc kubenswrapper[4792]: I0121 18:10:01.238636 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-tvdgr" event={"ID":"129c7cf1-6a9e-440a-8d4e-049c0652cf6e","Type":"ContainerStarted","Data":"cce4d03a977f44c7f6f6ab88f8fadedbca282ce59f8060b96765cf7eb6dd1ae2"} Jan 21 18:10:01 crc kubenswrapper[4792]: I0121 18:10:01.243047 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8t4xq_10549a02-b482-4bc0-a770-65dbb57f340a/ovn-acl-logging/0.log" Jan 21 18:10:01 crc kubenswrapper[4792]: I0121 18:10:01.243471 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-8t4xq_10549a02-b482-4bc0-a770-65dbb57f340a/ovn-controller/0.log" Jan 21 18:10:01 crc kubenswrapper[4792]: I0121 18:10:01.243876 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" event={"ID":"10549a02-b482-4bc0-a770-65dbb57f340a","Type":"ContainerDied","Data":"493301990d1115a0e0327f815a7eef1bf32ed93b3085913126cf1a7d86cde676"} Jan 21 18:10:01 crc kubenswrapper[4792]: I0121 18:10:01.243919 4792 scope.go:117] "RemoveContainer" containerID="737adb699317b3944466c13490578c30fb1889fc1dc4298b2cf8ae612c8c94c9" Jan 21 18:10:01 crc kubenswrapper[4792]: I0121 18:10:01.244081 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-8t4xq" Jan 21 18:10:01 crc kubenswrapper[4792]: I0121 18:10:01.248137 4792 generic.go:334] "Generic (PLEG): container finished" podID="73db8af8-90ee-447c-b25f-919753d6cb80" containerID="84861e1e5e50ade2a2916a33048d8d097d0e65cb119e9ced1a002cfade5dcfca" exitCode=0 Jan 21 18:10:01 crc kubenswrapper[4792]: I0121 18:10:01.248211 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" event={"ID":"73db8af8-90ee-447c-b25f-919753d6cb80","Type":"ContainerDied","Data":"84861e1e5e50ade2a2916a33048d8d097d0e65cb119e9ced1a002cfade5dcfca"} Jan 21 18:10:01 crc kubenswrapper[4792]: I0121 18:10:01.248249 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" event={"ID":"73db8af8-90ee-447c-b25f-919753d6cb80","Type":"ContainerStarted","Data":"5e194eda9e40cdf41e30ab633e4bd8111b391d107a48cfbd9701ba83fe2332c1"} Jan 21 18:10:01 crc kubenswrapper[4792]: I0121 18:10:01.279448 4792 scope.go:117] "RemoveContainer" containerID="34590965e9e9747e10f05f5afd0bda7415523fecb2b910d9eebc5385d476de26" Jan 21 18:10:01 crc kubenswrapper[4792]: I0121 18:10:01.304516 4792 scope.go:117] "RemoveContainer" containerID="26eaa4cba4f292a7bd950b1a3b06b6b754d10006c423caca6d3aef02ef4041de" Jan 21 18:10:01 crc kubenswrapper[4792]: I0121 18:10:01.328607 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-8t4xq"] Jan 21 18:10:01 crc kubenswrapper[4792]: I0121 18:10:01.333224 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-8t4xq"] Jan 21 18:10:01 crc kubenswrapper[4792]: I0121 18:10:01.347192 4792 scope.go:117] "RemoveContainer" containerID="dbe42ea058a01898f1f74034d7a52e0c13a6cc66078ff79b82135aac1b61844b" Jan 21 18:10:01 crc kubenswrapper[4792]: I0121 18:10:01.371046 4792 scope.go:117] "RemoveContainer" containerID="7c448576a421fa1cec76b4a169b479528979a7ca2c115501b9eb388e804f88cd" Jan 21 18:10:01 crc kubenswrapper[4792]: I0121 18:10:01.388701 4792 scope.go:117] "RemoveContainer" containerID="c651bf27107391f40915e9599d33fb1065f9e79c8d9fd19a5025cd0cbc8bf2fb" Jan 21 18:10:01 crc kubenswrapper[4792]: I0121 18:10:01.404819 4792 scope.go:117] "RemoveContainer" containerID="67e921528e7063b41597286881ffbd210378b508cb15b3c44963a9d7aa7bf522" Jan 21 18:10:01 crc kubenswrapper[4792]: I0121 18:10:01.422502 4792 scope.go:117] "RemoveContainer" containerID="5776fe0fcd2e1cbeb88acfd763523313130b834a7834c307e97838c7c3ef3243" Jan 21 18:10:01 crc kubenswrapper[4792]: I0121 18:10:01.443402 4792 scope.go:117] "RemoveContainer" containerID="421fba709c2f604744985e46877d23f98a84dff076c780d73e1d7d23421b0b0a" Jan 21 18:10:02 crc kubenswrapper[4792]: I0121 18:10:02.255736 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10549a02-b482-4bc0-a770-65dbb57f340a" path="/var/lib/kubelet/pods/10549a02-b482-4bc0-a770-65dbb57f340a/volumes" Jan 21 18:10:02 crc kubenswrapper[4792]: I0121 18:10:02.259117 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" event={"ID":"73db8af8-90ee-447c-b25f-919753d6cb80","Type":"ContainerStarted","Data":"28813a60e4bdd1a13099649c268169c909f93f1510fc942cfcc9bd8aef63db51"} Jan 21 18:10:02 crc kubenswrapper[4792]: I0121 18:10:02.259152 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" 
event={"ID":"73db8af8-90ee-447c-b25f-919753d6cb80","Type":"ContainerStarted","Data":"072d62f5f6a4be2da90e8d2ec9dd648a9392a4314d642124f2848f9689f87a0a"} Jan 21 18:10:02 crc kubenswrapper[4792]: I0121 18:10:02.259165 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" event={"ID":"73db8af8-90ee-447c-b25f-919753d6cb80","Type":"ContainerStarted","Data":"02457f4b448177d899794dc7bc3641d10637920494e69697b689e2d35605760d"} Jan 21 18:10:02 crc kubenswrapper[4792]: I0121 18:10:02.259175 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" event={"ID":"73db8af8-90ee-447c-b25f-919753d6cb80","Type":"ContainerStarted","Data":"60b70c2f810002a6ae760f536aa63444f7d9bddd3051590c22794956c7e21d54"} Jan 21 18:10:02 crc kubenswrapper[4792]: I0121 18:10:02.259184 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" event={"ID":"73db8af8-90ee-447c-b25f-919753d6cb80","Type":"ContainerStarted","Data":"c22a8056bb27a32cfdcb26e5df61a454d995f56dc6a001ebd93675d9cf1bfafc"} Jan 21 18:10:02 crc kubenswrapper[4792]: I0121 18:10:02.259192 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" event={"ID":"73db8af8-90ee-447c-b25f-919753d6cb80","Type":"ContainerStarted","Data":"b2dc655fc8c66aace600d8a20f023fb823d506b9e588f47291a13922074d1140"} Jan 21 18:10:04 crc kubenswrapper[4792]: I0121 18:10:04.279224 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" event={"ID":"73db8af8-90ee-447c-b25f-919753d6cb80","Type":"ContainerStarted","Data":"91c1571296a67c990094659f7560949db49afda4ce8763db4b1bcfeb564ccc08"} Jan 21 18:10:07 crc kubenswrapper[4792]: I0121 18:10:07.300276 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" event={"ID":"73db8af8-90ee-447c-b25f-919753d6cb80","Type":"ContainerStarted","Data":"c589ea33b954719f6393288d5e875e8c3d680a3afa7fd73bac7dafc4da26f16e"} Jan 21 18:10:08 crc kubenswrapper[4792]: I0121 18:10:08.306647 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:08 crc kubenswrapper[4792]: I0121 18:10:08.410387 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:08 crc kubenswrapper[4792]: I0121 18:10:08.418997 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" podStartSLOduration=8.418977004 podStartE2EDuration="8.418977004s" podCreationTimestamp="2026-01-21 18:10:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:10:08.413771251 +0000 UTC m=+842.395734437" watchObservedRunningTime="2026-01-21 18:10:08.418977004 +0000 UTC m=+842.400940190" Jan 21 18:10:09 crc kubenswrapper[4792]: I0121 18:10:09.313671 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:09 crc kubenswrapper[4792]: I0121 18:10:09.313747 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:09 crc kubenswrapper[4792]: I0121 18:10:09.343317 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:10:23 crc kubenswrapper[4792]: I0121 18:10:23.571195 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:10:23 crc kubenswrapper[4792]: I0121 18:10:23.572060 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:10:23 crc kubenswrapper[4792]: I0121 18:10:23.572131 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" Jan 21 18:10:23 crc kubenswrapper[4792]: I0121 18:10:23.573002 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"db9d47c638475d5f4679b8511694d303d4101605ef33cf9eed866432ca6cd998"} pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 18:10:23 crc kubenswrapper[4792]: I0121 18:10:23.573055 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" containerID="cri-o://db9d47c638475d5f4679b8511694d303d4101605ef33cf9eed866432ca6cd998" gracePeriod=600 Jan 21 18:10:24 crc kubenswrapper[4792]: I0121 18:10:24.408420 4792 generic.go:334] "Generic (PLEG): container finished" podID="759f2e21-e44e-4049-b262-cb49448e22ab" containerID="db9d47c638475d5f4679b8511694d303d4101605ef33cf9eed866432ca6cd998" exitCode=0 Jan 21 18:10:24 crc kubenswrapper[4792]: I0121 18:10:24.408507 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerDied","Data":"db9d47c638475d5f4679b8511694d303d4101605ef33cf9eed866432ca6cd998"} Jan 21 18:10:24 crc kubenswrapper[4792]: I0121 18:10:24.409603 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerStarted","Data":"51a7f3d84d23321326653dd420e73da4fa0fbd2379dcc7fa479dc2a2a53c626e"} Jan 21 18:10:24 crc kubenswrapper[4792]: I0121 18:10:24.409720 4792 scope.go:117] "RemoveContainer" containerID="4c628e700d7806ef45f8526a77baeb62e6d9e6f405ad08f7dde704d18179d214" Jan 21 18:10:30 crc kubenswrapper[4792]: I0121 18:10:30.801534 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-bqfth" Jan 21 18:11:13 crc kubenswrapper[4792]: I0121 18:11:13.842103 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rcgcm"] Jan 21 18:11:13 crc kubenswrapper[4792]: I0121 18:11:13.842920 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rcgcm" podUID="d5388300-3f00-46d9-9def-a945f6b3e4f9" 
containerName="registry-server" containerID="cri-o://aba366971dc0c297f356819f93ceb9f041a36de7023c15920bd824bbd955c584" gracePeriod=30 Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.193644 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rcgcm" Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.367585 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5388300-3f00-46d9-9def-a945f6b3e4f9-catalog-content\") pod \"d5388300-3f00-46d9-9def-a945f6b3e4f9\" (UID: \"d5388300-3f00-46d9-9def-a945f6b3e4f9\") " Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.367672 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jmkt8\" (UniqueName: \"kubernetes.io/projected/d5388300-3f00-46d9-9def-a945f6b3e4f9-kube-api-access-jmkt8\") pod \"d5388300-3f00-46d9-9def-a945f6b3e4f9\" (UID: \"d5388300-3f00-46d9-9def-a945f6b3e4f9\") " Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.367783 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5388300-3f00-46d9-9def-a945f6b3e4f9-utilities\") pod \"d5388300-3f00-46d9-9def-a945f6b3e4f9\" (UID: \"d5388300-3f00-46d9-9def-a945f6b3e4f9\") " Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.369052 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5388300-3f00-46d9-9def-a945f6b3e4f9-utilities" (OuterVolumeSpecName: "utilities") pod "d5388300-3f00-46d9-9def-a945f6b3e4f9" (UID: "d5388300-3f00-46d9-9def-a945f6b3e4f9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.374122 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5388300-3f00-46d9-9def-a945f6b3e4f9-kube-api-access-jmkt8" (OuterVolumeSpecName: "kube-api-access-jmkt8") pod "d5388300-3f00-46d9-9def-a945f6b3e4f9" (UID: "d5388300-3f00-46d9-9def-a945f6b3e4f9"). InnerVolumeSpecName "kube-api-access-jmkt8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.392681 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5388300-3f00-46d9-9def-a945f6b3e4f9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d5388300-3f00-46d9-9def-a945f6b3e4f9" (UID: "d5388300-3f00-46d9-9def-a945f6b3e4f9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.469577 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5388300-3f00-46d9-9def-a945f6b3e4f9-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.469624 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jmkt8\" (UniqueName: \"kubernetes.io/projected/d5388300-3f00-46d9-9def-a945f6b3e4f9-kube-api-access-jmkt8\") on node \"crc\" DevicePath \"\"" Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.469638 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5388300-3f00-46d9-9def-a945f6b3e4f9-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.722095 4792 generic.go:334] "Generic (PLEG): container finished" podID="d5388300-3f00-46d9-9def-a945f6b3e4f9" containerID="aba366971dc0c297f356819f93ceb9f041a36de7023c15920bd824bbd955c584" exitCode=0 Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.722134 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rcgcm" event={"ID":"d5388300-3f00-46d9-9def-a945f6b3e4f9","Type":"ContainerDied","Data":"aba366971dc0c297f356819f93ceb9f041a36de7023c15920bd824bbd955c584"} Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.722163 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rcgcm" event={"ID":"d5388300-3f00-46d9-9def-a945f6b3e4f9","Type":"ContainerDied","Data":"fa71c738f7906e9a6190ee01d69e4e31bdee5974cf3cf0d15d1c0896b2bdd165"} Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.722183 4792 scope.go:117] "RemoveContainer" containerID="aba366971dc0c297f356819f93ceb9f041a36de7023c15920bd824bbd955c584" Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.722209 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rcgcm" Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.742007 4792 scope.go:117] "RemoveContainer" containerID="f0b3972d49a92ab46b5bbab5f00a5fafa60d2a6903ed0fbc6a4220722fcefbb2" Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.754987 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rcgcm"] Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.759891 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rcgcm"] Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.778539 4792 scope.go:117] "RemoveContainer" containerID="f82316a0357f770b6f13c37d3b064fbc08a22680fd8b6c00393af8c286527ab3" Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.797735 4792 scope.go:117] "RemoveContainer" containerID="aba366971dc0c297f356819f93ceb9f041a36de7023c15920bd824bbd955c584" Jan 21 18:11:14 crc kubenswrapper[4792]: E0121 18:11:14.798168 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aba366971dc0c297f356819f93ceb9f041a36de7023c15920bd824bbd955c584\": container with ID starting with aba366971dc0c297f356819f93ceb9f041a36de7023c15920bd824bbd955c584 not found: ID does not exist" containerID="aba366971dc0c297f356819f93ceb9f041a36de7023c15920bd824bbd955c584" Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.798200 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aba366971dc0c297f356819f93ceb9f041a36de7023c15920bd824bbd955c584"} err="failed to get container status \"aba366971dc0c297f356819f93ceb9f041a36de7023c15920bd824bbd955c584\": rpc error: code = NotFound desc = could not find container \"aba366971dc0c297f356819f93ceb9f041a36de7023c15920bd824bbd955c584\": container with ID starting with aba366971dc0c297f356819f93ceb9f041a36de7023c15920bd824bbd955c584 not found: ID does not exist" Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.798224 4792 scope.go:117] "RemoveContainer" containerID="f0b3972d49a92ab46b5bbab5f00a5fafa60d2a6903ed0fbc6a4220722fcefbb2" Jan 21 18:11:14 crc kubenswrapper[4792]: E0121 18:11:14.798540 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0b3972d49a92ab46b5bbab5f00a5fafa60d2a6903ed0fbc6a4220722fcefbb2\": container with ID starting with f0b3972d49a92ab46b5bbab5f00a5fafa60d2a6903ed0fbc6a4220722fcefbb2 not found: ID does not exist" containerID="f0b3972d49a92ab46b5bbab5f00a5fafa60d2a6903ed0fbc6a4220722fcefbb2" Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.798588 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0b3972d49a92ab46b5bbab5f00a5fafa60d2a6903ed0fbc6a4220722fcefbb2"} err="failed to get container status \"f0b3972d49a92ab46b5bbab5f00a5fafa60d2a6903ed0fbc6a4220722fcefbb2\": rpc error: code = NotFound desc = could not find container \"f0b3972d49a92ab46b5bbab5f00a5fafa60d2a6903ed0fbc6a4220722fcefbb2\": container with ID starting with f0b3972d49a92ab46b5bbab5f00a5fafa60d2a6903ed0fbc6a4220722fcefbb2 not found: ID does not exist" Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.798618 4792 scope.go:117] "RemoveContainer" containerID="f82316a0357f770b6f13c37d3b064fbc08a22680fd8b6c00393af8c286527ab3" Jan 21 18:11:14 crc kubenswrapper[4792]: E0121 18:11:14.798930 4792 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"f82316a0357f770b6f13c37d3b064fbc08a22680fd8b6c00393af8c286527ab3\": container with ID starting with f82316a0357f770b6f13c37d3b064fbc08a22680fd8b6c00393af8c286527ab3 not found: ID does not exist" containerID="f82316a0357f770b6f13c37d3b064fbc08a22680fd8b6c00393af8c286527ab3" Jan 21 18:11:14 crc kubenswrapper[4792]: I0121 18:11:14.798958 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f82316a0357f770b6f13c37d3b064fbc08a22680fd8b6c00393af8c286527ab3"} err="failed to get container status \"f82316a0357f770b6f13c37d3b064fbc08a22680fd8b6c00393af8c286527ab3\": rpc error: code = NotFound desc = could not find container \"f82316a0357f770b6f13c37d3b064fbc08a22680fd8b6c00393af8c286527ab3\": container with ID starting with f82316a0357f770b6f13c37d3b064fbc08a22680fd8b6c00393af8c286527ab3 not found: ID does not exist" Jan 21 18:11:16 crc kubenswrapper[4792]: I0121 18:11:16.254449 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5388300-3f00-46d9-9def-a945f6b3e4f9" path="/var/lib/kubelet/pods/d5388300-3f00-46d9-9def-a945f6b3e4f9/volumes" Jan 21 18:11:17 crc kubenswrapper[4792]: I0121 18:11:17.875938 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc"] Jan 21 18:11:17 crc kubenswrapper[4792]: E0121 18:11:17.876575 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5388300-3f00-46d9-9def-a945f6b3e4f9" containerName="extract-utilities" Jan 21 18:11:17 crc kubenswrapper[4792]: I0121 18:11:17.876590 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5388300-3f00-46d9-9def-a945f6b3e4f9" containerName="extract-utilities" Jan 21 18:11:17 crc kubenswrapper[4792]: E0121 18:11:17.876604 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5388300-3f00-46d9-9def-a945f6b3e4f9" containerName="registry-server" Jan 21 18:11:17 crc kubenswrapper[4792]: I0121 18:11:17.876610 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5388300-3f00-46d9-9def-a945f6b3e4f9" containerName="registry-server" Jan 21 18:11:17 crc kubenswrapper[4792]: E0121 18:11:17.876622 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5388300-3f00-46d9-9def-a945f6b3e4f9" containerName="extract-content" Jan 21 18:11:17 crc kubenswrapper[4792]: I0121 18:11:17.876628 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5388300-3f00-46d9-9def-a945f6b3e4f9" containerName="extract-content" Jan 21 18:11:17 crc kubenswrapper[4792]: I0121 18:11:17.876717 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5388300-3f00-46d9-9def-a945f6b3e4f9" containerName="registry-server" Jan 21 18:11:17 crc kubenswrapper[4792]: I0121 18:11:17.877545 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc" Jan 21 18:11:17 crc kubenswrapper[4792]: I0121 18:11:17.880008 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 21 18:11:17 crc kubenswrapper[4792]: I0121 18:11:17.889738 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc"] Jan 21 18:11:17 crc kubenswrapper[4792]: I0121 18:11:17.917337 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7cce3a91-d520-418a-86cf-68c8c34cdcb4-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc\" (UID: \"7cce3a91-d520-418a-86cf-68c8c34cdcb4\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc" Jan 21 18:11:17 crc kubenswrapper[4792]: I0121 18:11:17.917404 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7cce3a91-d520-418a-86cf-68c8c34cdcb4-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc\" (UID: \"7cce3a91-d520-418a-86cf-68c8c34cdcb4\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc" Jan 21 18:11:17 crc kubenswrapper[4792]: I0121 18:11:17.917454 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltszt\" (UniqueName: \"kubernetes.io/projected/7cce3a91-d520-418a-86cf-68c8c34cdcb4-kube-api-access-ltszt\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc\" (UID: \"7cce3a91-d520-418a-86cf-68c8c34cdcb4\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc" Jan 21 18:11:18 crc kubenswrapper[4792]: I0121 18:11:18.017917 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7cce3a91-d520-418a-86cf-68c8c34cdcb4-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc\" (UID: \"7cce3a91-d520-418a-86cf-68c8c34cdcb4\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc" Jan 21 18:11:18 crc kubenswrapper[4792]: I0121 18:11:18.017968 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7cce3a91-d520-418a-86cf-68c8c34cdcb4-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc\" (UID: \"7cce3a91-d520-418a-86cf-68c8c34cdcb4\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc" Jan 21 18:11:18 crc kubenswrapper[4792]: I0121 18:11:18.018015 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltszt\" (UniqueName: \"kubernetes.io/projected/7cce3a91-d520-418a-86cf-68c8c34cdcb4-kube-api-access-ltszt\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc\" (UID: \"7cce3a91-d520-418a-86cf-68c8c34cdcb4\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc" Jan 21 18:11:18 crc kubenswrapper[4792]: I0121 18:11:18.018527 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/7cce3a91-d520-418a-86cf-68c8c34cdcb4-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc\" (UID: \"7cce3a91-d520-418a-86cf-68c8c34cdcb4\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc" Jan 21 18:11:18 crc kubenswrapper[4792]: I0121 18:11:18.018601 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7cce3a91-d520-418a-86cf-68c8c34cdcb4-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc\" (UID: \"7cce3a91-d520-418a-86cf-68c8c34cdcb4\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc" Jan 21 18:11:18 crc kubenswrapper[4792]: I0121 18:11:18.038633 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltszt\" (UniqueName: \"kubernetes.io/projected/7cce3a91-d520-418a-86cf-68c8c34cdcb4-kube-api-access-ltszt\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc\" (UID: \"7cce3a91-d520-418a-86cf-68c8c34cdcb4\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc" Jan 21 18:11:18 crc kubenswrapper[4792]: I0121 18:11:18.258238 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc" Jan 21 18:11:18 crc kubenswrapper[4792]: I0121 18:11:18.462139 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc"] Jan 21 18:11:18 crc kubenswrapper[4792]: I0121 18:11:18.747353 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc" event={"ID":"7cce3a91-d520-418a-86cf-68c8c34cdcb4","Type":"ContainerStarted","Data":"72badc1af5ef116a9df11624923c83f4c2971d70eba7727e2300a7dc2b638de6"} Jan 21 18:11:18 crc kubenswrapper[4792]: I0121 18:11:18.747886 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc" event={"ID":"7cce3a91-d520-418a-86cf-68c8c34cdcb4","Type":"ContainerStarted","Data":"38536a972a8bb8820f88bbf896dfb01cbcd6ef6d9a9289c20b19549296787c0a"} Jan 21 18:11:19 crc kubenswrapper[4792]: I0121 18:11:19.755009 4792 generic.go:334] "Generic (PLEG): container finished" podID="7cce3a91-d520-418a-86cf-68c8c34cdcb4" containerID="72badc1af5ef116a9df11624923c83f4c2971d70eba7727e2300a7dc2b638de6" exitCode=0 Jan 21 18:11:19 crc kubenswrapper[4792]: I0121 18:11:19.755061 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc" event={"ID":"7cce3a91-d520-418a-86cf-68c8c34cdcb4","Type":"ContainerDied","Data":"72badc1af5ef116a9df11624923c83f4c2971d70eba7727e2300a7dc2b638de6"} Jan 21 18:11:19 crc kubenswrapper[4792]: I0121 18:11:19.756727 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 18:11:20 crc kubenswrapper[4792]: I0121 18:11:20.780193 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-v2pzt"] Jan 21 18:11:20 crc kubenswrapper[4792]: I0121 18:11:20.781576 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-v2pzt" Jan 21 18:11:20 crc kubenswrapper[4792]: I0121 18:11:20.800122 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v2pzt"] Jan 21 18:11:20 crc kubenswrapper[4792]: I0121 18:11:20.871771 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lkzx\" (UniqueName: \"kubernetes.io/projected/34fa69f7-7031-406f-befe-bfaee9844abf-kube-api-access-5lkzx\") pod \"redhat-operators-v2pzt\" (UID: \"34fa69f7-7031-406f-befe-bfaee9844abf\") " pod="openshift-marketplace/redhat-operators-v2pzt" Jan 21 18:11:20 crc kubenswrapper[4792]: I0121 18:11:20.872164 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34fa69f7-7031-406f-befe-bfaee9844abf-catalog-content\") pod \"redhat-operators-v2pzt\" (UID: \"34fa69f7-7031-406f-befe-bfaee9844abf\") " pod="openshift-marketplace/redhat-operators-v2pzt" Jan 21 18:11:20 crc kubenswrapper[4792]: I0121 18:11:20.872194 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34fa69f7-7031-406f-befe-bfaee9844abf-utilities\") pod \"redhat-operators-v2pzt\" (UID: \"34fa69f7-7031-406f-befe-bfaee9844abf\") " pod="openshift-marketplace/redhat-operators-v2pzt" Jan 21 18:11:20 crc kubenswrapper[4792]: I0121 18:11:20.973785 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lkzx\" (UniqueName: \"kubernetes.io/projected/34fa69f7-7031-406f-befe-bfaee9844abf-kube-api-access-5lkzx\") pod \"redhat-operators-v2pzt\" (UID: \"34fa69f7-7031-406f-befe-bfaee9844abf\") " pod="openshift-marketplace/redhat-operators-v2pzt" Jan 21 18:11:20 crc kubenswrapper[4792]: I0121 18:11:20.973861 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34fa69f7-7031-406f-befe-bfaee9844abf-catalog-content\") pod \"redhat-operators-v2pzt\" (UID: \"34fa69f7-7031-406f-befe-bfaee9844abf\") " pod="openshift-marketplace/redhat-operators-v2pzt" Jan 21 18:11:20 crc kubenswrapper[4792]: I0121 18:11:20.973897 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34fa69f7-7031-406f-befe-bfaee9844abf-utilities\") pod \"redhat-operators-v2pzt\" (UID: \"34fa69f7-7031-406f-befe-bfaee9844abf\") " pod="openshift-marketplace/redhat-operators-v2pzt" Jan 21 18:11:20 crc kubenswrapper[4792]: I0121 18:11:20.974510 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34fa69f7-7031-406f-befe-bfaee9844abf-utilities\") pod \"redhat-operators-v2pzt\" (UID: \"34fa69f7-7031-406f-befe-bfaee9844abf\") " pod="openshift-marketplace/redhat-operators-v2pzt" Jan 21 18:11:20 crc kubenswrapper[4792]: I0121 18:11:20.974760 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34fa69f7-7031-406f-befe-bfaee9844abf-catalog-content\") pod \"redhat-operators-v2pzt\" (UID: \"34fa69f7-7031-406f-befe-bfaee9844abf\") " pod="openshift-marketplace/redhat-operators-v2pzt" Jan 21 18:11:21 crc kubenswrapper[4792]: I0121 18:11:21.013182 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-5lkzx\" (UniqueName: \"kubernetes.io/projected/34fa69f7-7031-406f-befe-bfaee9844abf-kube-api-access-5lkzx\") pod \"redhat-operators-v2pzt\" (UID: \"34fa69f7-7031-406f-befe-bfaee9844abf\") " pod="openshift-marketplace/redhat-operators-v2pzt" Jan 21 18:11:21 crc kubenswrapper[4792]: I0121 18:11:21.108915 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v2pzt" Jan 21 18:11:21 crc kubenswrapper[4792]: I0121 18:11:21.406058 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v2pzt"] Jan 21 18:11:21 crc kubenswrapper[4792]: W0121 18:11:21.409722 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod34fa69f7_7031_406f_befe_bfaee9844abf.slice/crio-36a57b92e732e048af9d0fcd83d49953a79b03a7b11a6020f6e95d327ca853b1 WatchSource:0}: Error finding container 36a57b92e732e048af9d0fcd83d49953a79b03a7b11a6020f6e95d327ca853b1: Status 404 returned error can't find the container with id 36a57b92e732e048af9d0fcd83d49953a79b03a7b11a6020f6e95d327ca853b1 Jan 21 18:11:21 crc kubenswrapper[4792]: I0121 18:11:21.766921 4792 generic.go:334] "Generic (PLEG): container finished" podID="7cce3a91-d520-418a-86cf-68c8c34cdcb4" containerID="d08fb4b008ba457dc092b0180237e4b1217f2671d6ecd1d0608b5eab187f043d" exitCode=0 Jan 21 18:11:21 crc kubenswrapper[4792]: I0121 18:11:21.766971 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc" event={"ID":"7cce3a91-d520-418a-86cf-68c8c34cdcb4","Type":"ContainerDied","Data":"d08fb4b008ba457dc092b0180237e4b1217f2671d6ecd1d0608b5eab187f043d"} Jan 21 18:11:21 crc kubenswrapper[4792]: I0121 18:11:21.769235 4792 generic.go:334] "Generic (PLEG): container finished" podID="34fa69f7-7031-406f-befe-bfaee9844abf" containerID="4c005c8a0bc1f85f492e16957734df7d6d2c7e670680f9736692837bbcba7e8f" exitCode=0 Jan 21 18:11:21 crc kubenswrapper[4792]: I0121 18:11:21.769271 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v2pzt" event={"ID":"34fa69f7-7031-406f-befe-bfaee9844abf","Type":"ContainerDied","Data":"4c005c8a0bc1f85f492e16957734df7d6d2c7e670680f9736692837bbcba7e8f"} Jan 21 18:11:21 crc kubenswrapper[4792]: I0121 18:11:21.769300 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v2pzt" event={"ID":"34fa69f7-7031-406f-befe-bfaee9844abf","Type":"ContainerStarted","Data":"36a57b92e732e048af9d0fcd83d49953a79b03a7b11a6020f6e95d327ca853b1"} Jan 21 18:11:22 crc kubenswrapper[4792]: I0121 18:11:22.779029 4792 generic.go:334] "Generic (PLEG): container finished" podID="7cce3a91-d520-418a-86cf-68c8c34cdcb4" containerID="a290e82f6700178d61d91237f42bb0cc2d03ac02300a5b66c6c3a3443d211401" exitCode=0 Jan 21 18:11:22 crc kubenswrapper[4792]: I0121 18:11:22.779147 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc" event={"ID":"7cce3a91-d520-418a-86cf-68c8c34cdcb4","Type":"ContainerDied","Data":"a290e82f6700178d61d91237f42bb0cc2d03ac02300a5b66c6c3a3443d211401"} Jan 21 18:11:23 crc kubenswrapper[4792]: I0121 18:11:23.788219 4792 generic.go:334] "Generic (PLEG): container finished" podID="34fa69f7-7031-406f-befe-bfaee9844abf" containerID="f5a27d5afe3b662fd0493a04503678de5df955a6593a45630cbae2ab4009ad64" exitCode=0 Jan 21 18:11:23 
crc kubenswrapper[4792]: I0121 18:11:23.788398 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v2pzt" event={"ID":"34fa69f7-7031-406f-befe-bfaee9844abf","Type":"ContainerDied","Data":"f5a27d5afe3b662fd0493a04503678de5df955a6593a45630cbae2ab4009ad64"} Jan 21 18:11:24 crc kubenswrapper[4792]: I0121 18:11:24.030120 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc" Jan 21 18:11:24 crc kubenswrapper[4792]: I0121 18:11:24.121895 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7cce3a91-d520-418a-86cf-68c8c34cdcb4-util\") pod \"7cce3a91-d520-418a-86cf-68c8c34cdcb4\" (UID: \"7cce3a91-d520-418a-86cf-68c8c34cdcb4\") " Jan 21 18:11:24 crc kubenswrapper[4792]: I0121 18:11:24.121981 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7cce3a91-d520-418a-86cf-68c8c34cdcb4-bundle\") pod \"7cce3a91-d520-418a-86cf-68c8c34cdcb4\" (UID: \"7cce3a91-d520-418a-86cf-68c8c34cdcb4\") " Jan 21 18:11:24 crc kubenswrapper[4792]: I0121 18:11:24.122097 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltszt\" (UniqueName: \"kubernetes.io/projected/7cce3a91-d520-418a-86cf-68c8c34cdcb4-kube-api-access-ltszt\") pod \"7cce3a91-d520-418a-86cf-68c8c34cdcb4\" (UID: \"7cce3a91-d520-418a-86cf-68c8c34cdcb4\") " Jan 21 18:11:24 crc kubenswrapper[4792]: I0121 18:11:24.127198 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7cce3a91-d520-418a-86cf-68c8c34cdcb4-bundle" (OuterVolumeSpecName: "bundle") pod "7cce3a91-d520-418a-86cf-68c8c34cdcb4" (UID: "7cce3a91-d520-418a-86cf-68c8c34cdcb4"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:11:24 crc kubenswrapper[4792]: I0121 18:11:24.129728 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7cce3a91-d520-418a-86cf-68c8c34cdcb4-kube-api-access-ltszt" (OuterVolumeSpecName: "kube-api-access-ltszt") pod "7cce3a91-d520-418a-86cf-68c8c34cdcb4" (UID: "7cce3a91-d520-418a-86cf-68c8c34cdcb4"). InnerVolumeSpecName "kube-api-access-ltszt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:11:24 crc kubenswrapper[4792]: I0121 18:11:24.133700 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7cce3a91-d520-418a-86cf-68c8c34cdcb4-util" (OuterVolumeSpecName: "util") pod "7cce3a91-d520-418a-86cf-68c8c34cdcb4" (UID: "7cce3a91-d520-418a-86cf-68c8c34cdcb4"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:11:24 crc kubenswrapper[4792]: I0121 18:11:24.223635 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltszt\" (UniqueName: \"kubernetes.io/projected/7cce3a91-d520-418a-86cf-68c8c34cdcb4-kube-api-access-ltszt\") on node \"crc\" DevicePath \"\"" Jan 21 18:11:24 crc kubenswrapper[4792]: I0121 18:11:24.223681 4792 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7cce3a91-d520-418a-86cf-68c8c34cdcb4-util\") on node \"crc\" DevicePath \"\"" Jan 21 18:11:24 crc kubenswrapper[4792]: I0121 18:11:24.223690 4792 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7cce3a91-d520-418a-86cf-68c8c34cdcb4-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 18:11:24 crc kubenswrapper[4792]: I0121 18:11:24.796357 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc" event={"ID":"7cce3a91-d520-418a-86cf-68c8c34cdcb4","Type":"ContainerDied","Data":"38536a972a8bb8820f88bbf896dfb01cbcd6ef6d9a9289c20b19549296787c0a"} Jan 21 18:11:24 crc kubenswrapper[4792]: I0121 18:11:24.796405 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc" Jan 21 18:11:24 crc kubenswrapper[4792]: I0121 18:11:24.796415 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="38536a972a8bb8820f88bbf896dfb01cbcd6ef6d9a9289c20b19549296787c0a" Jan 21 18:11:24 crc kubenswrapper[4792]: I0121 18:11:24.799910 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v2pzt" event={"ID":"34fa69f7-7031-406f-befe-bfaee9844abf","Type":"ContainerStarted","Data":"7653bc5793b60352c6f69f2c957feff632e4cced20a70e734ef3d569a809a88b"} Jan 21 18:11:24 crc kubenswrapper[4792]: I0121 18:11:24.861032 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-v2pzt" podStartSLOduration=2.173779488 podStartE2EDuration="4.86099719s" podCreationTimestamp="2026-01-21 18:11:20 +0000 UTC" firstStartedPulling="2026-01-21 18:11:21.770233017 +0000 UTC m=+915.752196203" lastFinishedPulling="2026-01-21 18:11:24.457450719 +0000 UTC m=+918.439413905" observedRunningTime="2026-01-21 18:11:24.856160578 +0000 UTC m=+918.838123774" watchObservedRunningTime="2026-01-21 18:11:24.86099719 +0000 UTC m=+918.842960376" Jan 21 18:11:26 crc kubenswrapper[4792]: I0121 18:11:26.849594 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8"] Jan 21 18:11:26 crc kubenswrapper[4792]: E0121 18:11:26.850480 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cce3a91-d520-418a-86cf-68c8c34cdcb4" containerName="pull" Jan 21 18:11:26 crc kubenswrapper[4792]: I0121 18:11:26.850502 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cce3a91-d520-418a-86cf-68c8c34cdcb4" containerName="pull" Jan 21 18:11:26 crc kubenswrapper[4792]: E0121 18:11:26.850518 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cce3a91-d520-418a-86cf-68c8c34cdcb4" containerName="extract" Jan 21 18:11:26 crc kubenswrapper[4792]: I0121 18:11:26.850529 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cce3a91-d520-418a-86cf-68c8c34cdcb4" 
containerName="extract" Jan 21 18:11:26 crc kubenswrapper[4792]: E0121 18:11:26.850551 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cce3a91-d520-418a-86cf-68c8c34cdcb4" containerName="util" Jan 21 18:11:26 crc kubenswrapper[4792]: I0121 18:11:26.850561 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cce3a91-d520-418a-86cf-68c8c34cdcb4" containerName="util" Jan 21 18:11:26 crc kubenswrapper[4792]: I0121 18:11:26.850836 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cce3a91-d520-418a-86cf-68c8c34cdcb4" containerName="extract" Jan 21 18:11:26 crc kubenswrapper[4792]: I0121 18:11:26.899601 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8"] Jan 21 18:11:26 crc kubenswrapper[4792]: I0121 18:11:26.899778 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8" Jan 21 18:11:26 crc kubenswrapper[4792]: I0121 18:11:26.903102 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 21 18:11:26 crc kubenswrapper[4792]: I0121 18:11:26.964747 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rm27q\" (UniqueName: \"kubernetes.io/projected/aa3174c8-ee84-485c-b192-a75530c69e75-kube-api-access-rm27q\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8\" (UID: \"aa3174c8-ee84-485c-b192-a75530c69e75\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8" Jan 21 18:11:26 crc kubenswrapper[4792]: I0121 18:11:26.964876 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/aa3174c8-ee84-485c-b192-a75530c69e75-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8\" (UID: \"aa3174c8-ee84-485c-b192-a75530c69e75\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8" Jan 21 18:11:26 crc kubenswrapper[4792]: I0121 18:11:26.964918 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/aa3174c8-ee84-485c-b192-a75530c69e75-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8\" (UID: \"aa3174c8-ee84-485c-b192-a75530c69e75\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8" Jan 21 18:11:27 crc kubenswrapper[4792]: I0121 18:11:27.066420 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/aa3174c8-ee84-485c-b192-a75530c69e75-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8\" (UID: \"aa3174c8-ee84-485c-b192-a75530c69e75\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8" Jan 21 18:11:27 crc kubenswrapper[4792]: I0121 18:11:27.066499 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/aa3174c8-ee84-485c-b192-a75530c69e75-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8\" (UID: \"aa3174c8-ee84-485c-b192-a75530c69e75\") " 
pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8" Jan 21 18:11:27 crc kubenswrapper[4792]: I0121 18:11:27.066550 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rm27q\" (UniqueName: \"kubernetes.io/projected/aa3174c8-ee84-485c-b192-a75530c69e75-kube-api-access-rm27q\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8\" (UID: \"aa3174c8-ee84-485c-b192-a75530c69e75\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8" Jan 21 18:11:27 crc kubenswrapper[4792]: I0121 18:11:27.067202 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/aa3174c8-ee84-485c-b192-a75530c69e75-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8\" (UID: \"aa3174c8-ee84-485c-b192-a75530c69e75\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8" Jan 21 18:11:27 crc kubenswrapper[4792]: I0121 18:11:27.067285 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/aa3174c8-ee84-485c-b192-a75530c69e75-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8\" (UID: \"aa3174c8-ee84-485c-b192-a75530c69e75\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8" Jan 21 18:11:27 crc kubenswrapper[4792]: I0121 18:11:27.090926 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rm27q\" (UniqueName: \"kubernetes.io/projected/aa3174c8-ee84-485c-b192-a75530c69e75-kube-api-access-rm27q\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8\" (UID: \"aa3174c8-ee84-485c-b192-a75530c69e75\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8" Jan 21 18:11:27 crc kubenswrapper[4792]: I0121 18:11:27.222495 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8" Jan 21 18:11:27 crc kubenswrapper[4792]: I0121 18:11:27.441477 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8"] Jan 21 18:11:27 crc kubenswrapper[4792]: I0121 18:11:27.816959 4792 generic.go:334] "Generic (PLEG): container finished" podID="aa3174c8-ee84-485c-b192-a75530c69e75" containerID="e975aac5ad115e2656e011a201a8e42133dcc693a0e5a58c5807c5e8a6df7ad2" exitCode=0 Jan 21 18:11:27 crc kubenswrapper[4792]: I0121 18:11:27.817034 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8" event={"ID":"aa3174c8-ee84-485c-b192-a75530c69e75","Type":"ContainerDied","Data":"e975aac5ad115e2656e011a201a8e42133dcc693a0e5a58c5807c5e8a6df7ad2"} Jan 21 18:11:27 crc kubenswrapper[4792]: I0121 18:11:27.817072 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8" event={"ID":"aa3174c8-ee84-485c-b192-a75530c69e75","Type":"ContainerStarted","Data":"87218d9c300efd00d967aefff24686240d33f69989c92747384758dfe8633137"} Jan 21 18:11:28 crc kubenswrapper[4792]: I0121 18:11:28.239401 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv"] Jan 21 18:11:28 crc kubenswrapper[4792]: I0121 18:11:28.240563 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv" Jan 21 18:11:28 crc kubenswrapper[4792]: I0121 18:11:28.263210 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv"] Jan 21 18:11:28 crc kubenswrapper[4792]: I0121 18:11:28.283101 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5e215ddb-3fd1-4111-bee0-7758814f6916-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv\" (UID: \"5e215ddb-3fd1-4111-bee0-7758814f6916\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv" Jan 21 18:11:28 crc kubenswrapper[4792]: I0121 18:11:28.283154 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5e215ddb-3fd1-4111-bee0-7758814f6916-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv\" (UID: \"5e215ddb-3fd1-4111-bee0-7758814f6916\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv" Jan 21 18:11:28 crc kubenswrapper[4792]: I0121 18:11:28.283189 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pn26j\" (UniqueName: \"kubernetes.io/projected/5e215ddb-3fd1-4111-bee0-7758814f6916-kube-api-access-pn26j\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv\" (UID: \"5e215ddb-3fd1-4111-bee0-7758814f6916\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv" Jan 21 18:11:28 crc kubenswrapper[4792]: I0121 18:11:28.384769 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"util\" (UniqueName: \"kubernetes.io/empty-dir/5e215ddb-3fd1-4111-bee0-7758814f6916-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv\" (UID: \"5e215ddb-3fd1-4111-bee0-7758814f6916\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv" Jan 21 18:11:28 crc kubenswrapper[4792]: I0121 18:11:28.384869 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5e215ddb-3fd1-4111-bee0-7758814f6916-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv\" (UID: \"5e215ddb-3fd1-4111-bee0-7758814f6916\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv" Jan 21 18:11:28 crc kubenswrapper[4792]: I0121 18:11:28.384910 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pn26j\" (UniqueName: \"kubernetes.io/projected/5e215ddb-3fd1-4111-bee0-7758814f6916-kube-api-access-pn26j\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv\" (UID: \"5e215ddb-3fd1-4111-bee0-7758814f6916\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv" Jan 21 18:11:28 crc kubenswrapper[4792]: I0121 18:11:28.385654 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5e215ddb-3fd1-4111-bee0-7758814f6916-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv\" (UID: \"5e215ddb-3fd1-4111-bee0-7758814f6916\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv" Jan 21 18:11:28 crc kubenswrapper[4792]: I0121 18:11:28.386062 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5e215ddb-3fd1-4111-bee0-7758814f6916-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv\" (UID: \"5e215ddb-3fd1-4111-bee0-7758814f6916\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv" Jan 21 18:11:28 crc kubenswrapper[4792]: I0121 18:11:28.407932 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pn26j\" (UniqueName: \"kubernetes.io/projected/5e215ddb-3fd1-4111-bee0-7758814f6916-kube-api-access-pn26j\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv\" (UID: \"5e215ddb-3fd1-4111-bee0-7758814f6916\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv" Jan 21 18:11:28 crc kubenswrapper[4792]: I0121 18:11:28.559957 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv" Jan 21 18:11:28 crc kubenswrapper[4792]: I0121 18:11:28.795434 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv"] Jan 21 18:11:28 crc kubenswrapper[4792]: I0121 18:11:28.851355 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv" event={"ID":"5e215ddb-3fd1-4111-bee0-7758814f6916","Type":"ContainerStarted","Data":"2dd54c192b401cb4a186581c3ab1c0c68ec7bc94900bc4588277491ac29f2804"} Jan 21 18:11:29 crc kubenswrapper[4792]: I0121 18:11:29.866497 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv" event={"ID":"5e215ddb-3fd1-4111-bee0-7758814f6916","Type":"ContainerStarted","Data":"786726acf502127722590eb2735105e15daad6a4c063fbb3564677fe8607a3f3"} Jan 21 18:11:31 crc kubenswrapper[4792]: I0121 18:11:30.874268 4792 generic.go:334] "Generic (PLEG): container finished" podID="5e215ddb-3fd1-4111-bee0-7758814f6916" containerID="786726acf502127722590eb2735105e15daad6a4c063fbb3564677fe8607a3f3" exitCode=0 Jan 21 18:11:31 crc kubenswrapper[4792]: I0121 18:11:30.874359 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv" event={"ID":"5e215ddb-3fd1-4111-bee0-7758814f6916","Type":"ContainerDied","Data":"786726acf502127722590eb2735105e15daad6a4c063fbb3564677fe8607a3f3"} Jan 21 18:11:31 crc kubenswrapper[4792]: I0121 18:11:30.878805 4792 generic.go:334] "Generic (PLEG): container finished" podID="aa3174c8-ee84-485c-b192-a75530c69e75" containerID="53ab2a8d2442d126932938d7772a3f7056973a71a146cce9a50f80fe0e5b10e6" exitCode=0 Jan 21 18:11:31 crc kubenswrapper[4792]: I0121 18:11:30.878836 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8" event={"ID":"aa3174c8-ee84-485c-b192-a75530c69e75","Type":"ContainerDied","Data":"53ab2a8d2442d126932938d7772a3f7056973a71a146cce9a50f80fe0e5b10e6"} Jan 21 18:11:31 crc kubenswrapper[4792]: I0121 18:11:31.109690 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-v2pzt" Jan 21 18:11:31 crc kubenswrapper[4792]: I0121 18:11:31.110304 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-v2pzt" Jan 21 18:11:31 crc kubenswrapper[4792]: I0121 18:11:31.257707 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-v2pzt" Jan 21 18:11:31 crc kubenswrapper[4792]: I0121 18:11:31.796025 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-z7lgv"] Jan 21 18:11:31 crc kubenswrapper[4792]: I0121 18:11:31.799271 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-z7lgv" Jan 21 18:11:31 crc kubenswrapper[4792]: I0121 18:11:31.833221 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12ee0724-b2e1-483a-b743-2f2cda1be892-catalog-content\") pod \"certified-operators-z7lgv\" (UID: \"12ee0724-b2e1-483a-b743-2f2cda1be892\") " pod="openshift-marketplace/certified-operators-z7lgv" Jan 21 18:11:31 crc kubenswrapper[4792]: I0121 18:11:31.833301 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmlx2\" (UniqueName: \"kubernetes.io/projected/12ee0724-b2e1-483a-b743-2f2cda1be892-kube-api-access-dmlx2\") pod \"certified-operators-z7lgv\" (UID: \"12ee0724-b2e1-483a-b743-2f2cda1be892\") " pod="openshift-marketplace/certified-operators-z7lgv" Jan 21 18:11:31 crc kubenswrapper[4792]: I0121 18:11:31.833335 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12ee0724-b2e1-483a-b743-2f2cda1be892-utilities\") pod \"certified-operators-z7lgv\" (UID: \"12ee0724-b2e1-483a-b743-2f2cda1be892\") " pod="openshift-marketplace/certified-operators-z7lgv" Jan 21 18:11:31 crc kubenswrapper[4792]: I0121 18:11:31.835297 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z7lgv"] Jan 21 18:11:31 crc kubenswrapper[4792]: I0121 18:11:31.919022 4792 generic.go:334] "Generic (PLEG): container finished" podID="aa3174c8-ee84-485c-b192-a75530c69e75" containerID="35bb8d943f9d80431905d299be88018fdae87ad15b947f04f47242e0a7ea8144" exitCode=0 Jan 21 18:11:31 crc kubenswrapper[4792]: I0121 18:11:31.920335 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8" event={"ID":"aa3174c8-ee84-485c-b192-a75530c69e75","Type":"ContainerDied","Data":"35bb8d943f9d80431905d299be88018fdae87ad15b947f04f47242e0a7ea8144"} Jan 21 18:11:31 crc kubenswrapper[4792]: I0121 18:11:31.935091 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12ee0724-b2e1-483a-b743-2f2cda1be892-catalog-content\") pod \"certified-operators-z7lgv\" (UID: \"12ee0724-b2e1-483a-b743-2f2cda1be892\") " pod="openshift-marketplace/certified-operators-z7lgv" Jan 21 18:11:31 crc kubenswrapper[4792]: I0121 18:11:31.935180 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmlx2\" (UniqueName: \"kubernetes.io/projected/12ee0724-b2e1-483a-b743-2f2cda1be892-kube-api-access-dmlx2\") pod \"certified-operators-z7lgv\" (UID: \"12ee0724-b2e1-483a-b743-2f2cda1be892\") " pod="openshift-marketplace/certified-operators-z7lgv" Jan 21 18:11:31 crc kubenswrapper[4792]: I0121 18:11:31.935215 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12ee0724-b2e1-483a-b743-2f2cda1be892-utilities\") pod \"certified-operators-z7lgv\" (UID: \"12ee0724-b2e1-483a-b743-2f2cda1be892\") " pod="openshift-marketplace/certified-operators-z7lgv" Jan 21 18:11:31 crc kubenswrapper[4792]: I0121 18:11:31.935719 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12ee0724-b2e1-483a-b743-2f2cda1be892-utilities\") pod 
\"certified-operators-z7lgv\" (UID: \"12ee0724-b2e1-483a-b743-2f2cda1be892\") " pod="openshift-marketplace/certified-operators-z7lgv" Jan 21 18:11:31 crc kubenswrapper[4792]: I0121 18:11:31.936069 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12ee0724-b2e1-483a-b743-2f2cda1be892-catalog-content\") pod \"certified-operators-z7lgv\" (UID: \"12ee0724-b2e1-483a-b743-2f2cda1be892\") " pod="openshift-marketplace/certified-operators-z7lgv" Jan 21 18:11:31 crc kubenswrapper[4792]: I0121 18:11:31.965052 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmlx2\" (UniqueName: \"kubernetes.io/projected/12ee0724-b2e1-483a-b743-2f2cda1be892-kube-api-access-dmlx2\") pod \"certified-operators-z7lgv\" (UID: \"12ee0724-b2e1-483a-b743-2f2cda1be892\") " pod="openshift-marketplace/certified-operators-z7lgv" Jan 21 18:11:32 crc kubenswrapper[4792]: I0121 18:11:32.195366 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-v2pzt" Jan 21 18:11:32 crc kubenswrapper[4792]: I0121 18:11:32.236073 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z7lgv" Jan 21 18:11:33 crc kubenswrapper[4792]: I0121 18:11:33.733650 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z7lgv"] Jan 21 18:11:33 crc kubenswrapper[4792]: W0121 18:11:33.919500 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod12ee0724_b2e1_483a_b743_2f2cda1be892.slice/crio-a086ea463fe64516d432a4adc0b645bcba2c9760d57fffff4ea6199f99cd0e27 WatchSource:0}: Error finding container a086ea463fe64516d432a4adc0b645bcba2c9760d57fffff4ea6199f99cd0e27: Status 404 returned error can't find the container with id a086ea463fe64516d432a4adc0b645bcba2c9760d57fffff4ea6199f99cd0e27 Jan 21 18:11:33 crc kubenswrapper[4792]: I0121 18:11:33.941761 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z7lgv" event={"ID":"12ee0724-b2e1-483a-b743-2f2cda1be892","Type":"ContainerStarted","Data":"a086ea463fe64516d432a4adc0b645bcba2c9760d57fffff4ea6199f99cd0e27"} Jan 21 18:11:34 crc kubenswrapper[4792]: I0121 18:11:34.955031 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z7lgv" event={"ID":"12ee0724-b2e1-483a-b743-2f2cda1be892","Type":"ContainerStarted","Data":"22a67805cab763a85d9e62589684aea644d084ad640c87762de34d8294146ec7"} Jan 21 18:11:35 crc kubenswrapper[4792]: I0121 18:11:35.002227 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8" Jan 21 18:11:35 crc kubenswrapper[4792]: I0121 18:11:35.114351 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rm27q\" (UniqueName: \"kubernetes.io/projected/aa3174c8-ee84-485c-b192-a75530c69e75-kube-api-access-rm27q\") pod \"aa3174c8-ee84-485c-b192-a75530c69e75\" (UID: \"aa3174c8-ee84-485c-b192-a75530c69e75\") " Jan 21 18:11:35 crc kubenswrapper[4792]: I0121 18:11:35.114414 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/aa3174c8-ee84-485c-b192-a75530c69e75-util\") pod \"aa3174c8-ee84-485c-b192-a75530c69e75\" (UID: \"aa3174c8-ee84-485c-b192-a75530c69e75\") " Jan 21 18:11:35 crc kubenswrapper[4792]: I0121 18:11:35.114566 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/aa3174c8-ee84-485c-b192-a75530c69e75-bundle\") pod \"aa3174c8-ee84-485c-b192-a75530c69e75\" (UID: \"aa3174c8-ee84-485c-b192-a75530c69e75\") " Jan 21 18:11:35 crc kubenswrapper[4792]: I0121 18:11:35.115618 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa3174c8-ee84-485c-b192-a75530c69e75-bundle" (OuterVolumeSpecName: "bundle") pod "aa3174c8-ee84-485c-b192-a75530c69e75" (UID: "aa3174c8-ee84-485c-b192-a75530c69e75"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:11:35 crc kubenswrapper[4792]: I0121 18:11:35.133481 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa3174c8-ee84-485c-b192-a75530c69e75-kube-api-access-rm27q" (OuterVolumeSpecName: "kube-api-access-rm27q") pod "aa3174c8-ee84-485c-b192-a75530c69e75" (UID: "aa3174c8-ee84-485c-b192-a75530c69e75"). InnerVolumeSpecName "kube-api-access-rm27q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:11:35 crc kubenswrapper[4792]: I0121 18:11:35.216130 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rm27q\" (UniqueName: \"kubernetes.io/projected/aa3174c8-ee84-485c-b192-a75530c69e75-kube-api-access-rm27q\") on node \"crc\" DevicePath \"\"" Jan 21 18:11:35 crc kubenswrapper[4792]: I0121 18:11:35.216174 4792 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/aa3174c8-ee84-485c-b192-a75530c69e75-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 18:11:35 crc kubenswrapper[4792]: I0121 18:11:35.371345 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa3174c8-ee84-485c-b192-a75530c69e75-util" (OuterVolumeSpecName: "util") pod "aa3174c8-ee84-485c-b192-a75530c69e75" (UID: "aa3174c8-ee84-485c-b192-a75530c69e75"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:11:35 crc kubenswrapper[4792]: I0121 18:11:35.398438 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v2pzt"] Jan 21 18:11:35 crc kubenswrapper[4792]: I0121 18:11:35.398725 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-v2pzt" podUID="34fa69f7-7031-406f-befe-bfaee9844abf" containerName="registry-server" containerID="cri-o://7653bc5793b60352c6f69f2c957feff632e4cced20a70e734ef3d569a809a88b" gracePeriod=2 Jan 21 18:11:35 crc kubenswrapper[4792]: I0121 18:11:35.418570 4792 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/aa3174c8-ee84-485c-b192-a75530c69e75-util\") on node \"crc\" DevicePath \"\"" Jan 21 18:11:35 crc kubenswrapper[4792]: I0121 18:11:35.964323 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8" Jan 21 18:11:35 crc kubenswrapper[4792]: I0121 18:11:35.967218 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8" event={"ID":"aa3174c8-ee84-485c-b192-a75530c69e75","Type":"ContainerDied","Data":"87218d9c300efd00d967aefff24686240d33f69989c92747384758dfe8633137"} Jan 21 18:11:35 crc kubenswrapper[4792]: I0121 18:11:35.967291 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="87218d9c300efd00d967aefff24686240d33f69989c92747384758dfe8633137" Jan 21 18:11:37 crc kubenswrapper[4792]: I0121 18:11:37.003493 4792 generic.go:334] "Generic (PLEG): container finished" podID="12ee0724-b2e1-483a-b743-2f2cda1be892" containerID="22a67805cab763a85d9e62589684aea644d084ad640c87762de34d8294146ec7" exitCode=0 Jan 21 18:11:37 crc kubenswrapper[4792]: I0121 18:11:37.003578 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z7lgv" event={"ID":"12ee0724-b2e1-483a-b743-2f2cda1be892","Type":"ContainerDied","Data":"22a67805cab763a85d9e62589684aea644d084ad640c87762de34d8294146ec7"} Jan 21 18:11:37 crc kubenswrapper[4792]: I0121 18:11:37.020131 4792 generic.go:334] "Generic (PLEG): container finished" podID="34fa69f7-7031-406f-befe-bfaee9844abf" containerID="7653bc5793b60352c6f69f2c957feff632e4cced20a70e734ef3d569a809a88b" exitCode=0 Jan 21 18:11:37 crc kubenswrapper[4792]: I0121 18:11:37.020168 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v2pzt" event={"ID":"34fa69f7-7031-406f-befe-bfaee9844abf","Type":"ContainerDied","Data":"7653bc5793b60352c6f69f2c957feff632e4cced20a70e734ef3d569a809a88b"} Jan 21 18:11:38 crc kubenswrapper[4792]: I0121 18:11:38.889746 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-7chrm"] Jan 21 18:11:38 crc kubenswrapper[4792]: E0121 18:11:38.890273 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa3174c8-ee84-485c-b192-a75530c69e75" containerName="util" Jan 21 18:11:38 crc kubenswrapper[4792]: I0121 18:11:38.890286 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa3174c8-ee84-485c-b192-a75530c69e75" containerName="util" Jan 21 18:11:38 crc kubenswrapper[4792]: E0121 18:11:38.890304 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa3174c8-ee84-485c-b192-a75530c69e75" 
containerName="extract" Jan 21 18:11:38 crc kubenswrapper[4792]: I0121 18:11:38.890311 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa3174c8-ee84-485c-b192-a75530c69e75" containerName="extract" Jan 21 18:11:38 crc kubenswrapper[4792]: E0121 18:11:38.890323 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa3174c8-ee84-485c-b192-a75530c69e75" containerName="pull" Jan 21 18:11:38 crc kubenswrapper[4792]: I0121 18:11:38.890329 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa3174c8-ee84-485c-b192-a75530c69e75" containerName="pull" Jan 21 18:11:38 crc kubenswrapper[4792]: I0121 18:11:38.890422 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa3174c8-ee84-485c-b192-a75530c69e75" containerName="extract" Jan 21 18:11:38 crc kubenswrapper[4792]: I0121 18:11:38.890815 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-7chrm" Jan 21 18:11:38 crc kubenswrapper[4792]: I0121 18:11:38.893296 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Jan 21 18:11:38 crc kubenswrapper[4792]: I0121 18:11:38.895428 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Jan 21 18:11:38 crc kubenswrapper[4792]: I0121 18:11:38.895546 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-x2bch" Jan 21 18:11:38 crc kubenswrapper[4792]: I0121 18:11:38.905915 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-7chrm"] Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.034914 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5"] Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.035620 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.039072 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z745s\" (UniqueName: \"kubernetes.io/projected/83aca1d7-0d0d-48b0-9fcf-e63c7643ff01-kube-api-access-z745s\") pod \"obo-prometheus-operator-68bc856cb9-7chrm\" (UID: \"83aca1d7-0d0d-48b0-9fcf-e63c7643ff01\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-7chrm" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.039810 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-bmgzs" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.043077 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.051911 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85"] Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.052809 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.054912 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5"] Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.087056 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85"] Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.140483 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ac3337f1-d2aa-4663-8ad8-13a3e1675c57-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5\" (UID: \"ac3337f1-d2aa-4663-8ad8-13a3e1675c57\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.140599 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ac3337f1-d2aa-4663-8ad8-13a3e1675c57-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5\" (UID: \"ac3337f1-d2aa-4663-8ad8-13a3e1675c57\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.140654 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z745s\" (UniqueName: \"kubernetes.io/projected/83aca1d7-0d0d-48b0-9fcf-e63c7643ff01-kube-api-access-z745s\") pod \"obo-prometheus-operator-68bc856cb9-7chrm\" (UID: \"83aca1d7-0d0d-48b0-9fcf-e63c7643ff01\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-7chrm" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.197321 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z745s\" (UniqueName: \"kubernetes.io/projected/83aca1d7-0d0d-48b0-9fcf-e63c7643ff01-kube-api-access-z745s\") pod \"obo-prometheus-operator-68bc856cb9-7chrm\" (UID: \"83aca1d7-0d0d-48b0-9fcf-e63c7643ff01\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-7chrm" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.210112 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-7chrm" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.241484 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cf96d731-c320-48de-9b50-8983d34908e4-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85\" (UID: \"cf96d731-c320-48de-9b50-8983d34908e4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.241562 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ac3337f1-d2aa-4663-8ad8-13a3e1675c57-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5\" (UID: \"ac3337f1-d2aa-4663-8ad8-13a3e1675c57\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.241588 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cf96d731-c320-48de-9b50-8983d34908e4-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85\" (UID: \"cf96d731-c320-48de-9b50-8983d34908e4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.241617 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ac3337f1-d2aa-4663-8ad8-13a3e1675c57-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5\" (UID: \"ac3337f1-d2aa-4663-8ad8-13a3e1675c57\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.245889 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ac3337f1-d2aa-4663-8ad8-13a3e1675c57-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5\" (UID: \"ac3337f1-d2aa-4663-8ad8-13a3e1675c57\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.246196 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ac3337f1-d2aa-4663-8ad8-13a3e1675c57-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5\" (UID: \"ac3337f1-d2aa-4663-8ad8-13a3e1675c57\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.293087 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-c5qnh"] Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.293905 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-c5qnh" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.300495 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.300738 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-88lhh" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.340987 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-c5qnh"] Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.415302 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.416038 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cf96d731-c320-48de-9b50-8983d34908e4-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85\" (UID: \"cf96d731-c320-48de-9b50-8983d34908e4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.416154 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cf96d731-c320-48de-9b50-8983d34908e4-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85\" (UID: \"cf96d731-c320-48de-9b50-8983d34908e4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.435584 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cf96d731-c320-48de-9b50-8983d34908e4-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85\" (UID: \"cf96d731-c320-48de-9b50-8983d34908e4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.442020 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cf96d731-c320-48de-9b50-8983d34908e4-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85\" (UID: \"cf96d731-c320-48de-9b50-8983d34908e4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.517202 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jr7jn\" (UniqueName: \"kubernetes.io/projected/52155773-3679-4730-b9e5-8906156cc494-kube-api-access-jr7jn\") pod \"observability-operator-59bdc8b94-c5qnh\" (UID: \"52155773-3679-4730-b9e5-8906156cc494\") " pod="openshift-operators/observability-operator-59bdc8b94-c5qnh" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.517327 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/52155773-3679-4730-b9e5-8906156cc494-observability-operator-tls\") pod \"observability-operator-59bdc8b94-c5qnh\" (UID: \"52155773-3679-4730-b9e5-8906156cc494\") " 
pod="openshift-operators/observability-operator-59bdc8b94-c5qnh" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.600763 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-qk94m"] Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.601703 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-qk94m" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.604398 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-nn8br" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.618297 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jr7jn\" (UniqueName: \"kubernetes.io/projected/52155773-3679-4730-b9e5-8906156cc494-kube-api-access-jr7jn\") pod \"observability-operator-59bdc8b94-c5qnh\" (UID: \"52155773-3679-4730-b9e5-8906156cc494\") " pod="openshift-operators/observability-operator-59bdc8b94-c5qnh" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.618376 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/52155773-3679-4730-b9e5-8906156cc494-observability-operator-tls\") pod \"observability-operator-59bdc8b94-c5qnh\" (UID: \"52155773-3679-4730-b9e5-8906156cc494\") " pod="openshift-operators/observability-operator-59bdc8b94-c5qnh" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.622201 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/52155773-3679-4730-b9e5-8906156cc494-observability-operator-tls\") pod \"observability-operator-59bdc8b94-c5qnh\" (UID: \"52155773-3679-4730-b9e5-8906156cc494\") " pod="openshift-operators/observability-operator-59bdc8b94-c5qnh" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.646400 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jr7jn\" (UniqueName: \"kubernetes.io/projected/52155773-3679-4730-b9e5-8906156cc494-kube-api-access-jr7jn\") pod \"observability-operator-59bdc8b94-c5qnh\" (UID: \"52155773-3679-4730-b9e5-8906156cc494\") " pod="openshift-operators/observability-operator-59bdc8b94-c5qnh" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.658142 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-qk94m"] Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.671776 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.679223 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-c5qnh" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.720695 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/86888bdd-6434-4df1-9e87-276f89a48723-openshift-service-ca\") pod \"perses-operator-5bf474d74f-qk94m\" (UID: \"86888bdd-6434-4df1-9e87-276f89a48723\") " pod="openshift-operators/perses-operator-5bf474d74f-qk94m" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.720949 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsxqt\" (UniqueName: \"kubernetes.io/projected/86888bdd-6434-4df1-9e87-276f89a48723-kube-api-access-nsxqt\") pod \"perses-operator-5bf474d74f-qk94m\" (UID: \"86888bdd-6434-4df1-9e87-276f89a48723\") " pod="openshift-operators/perses-operator-5bf474d74f-qk94m" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.822454 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsxqt\" (UniqueName: \"kubernetes.io/projected/86888bdd-6434-4df1-9e87-276f89a48723-kube-api-access-nsxqt\") pod \"perses-operator-5bf474d74f-qk94m\" (UID: \"86888bdd-6434-4df1-9e87-276f89a48723\") " pod="openshift-operators/perses-operator-5bf474d74f-qk94m" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.822567 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/86888bdd-6434-4df1-9e87-276f89a48723-openshift-service-ca\") pod \"perses-operator-5bf474d74f-qk94m\" (UID: \"86888bdd-6434-4df1-9e87-276f89a48723\") " pod="openshift-operators/perses-operator-5bf474d74f-qk94m" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.823540 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/86888bdd-6434-4df1-9e87-276f89a48723-openshift-service-ca\") pod \"perses-operator-5bf474d74f-qk94m\" (UID: \"86888bdd-6434-4df1-9e87-276f89a48723\") " pod="openshift-operators/perses-operator-5bf474d74f-qk94m" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.848317 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsxqt\" (UniqueName: \"kubernetes.io/projected/86888bdd-6434-4df1-9e87-276f89a48723-kube-api-access-nsxqt\") pod \"perses-operator-5bf474d74f-qk94m\" (UID: \"86888bdd-6434-4df1-9e87-276f89a48723\") " pod="openshift-operators/perses-operator-5bf474d74f-qk94m" Jan 21 18:11:39 crc kubenswrapper[4792]: I0121 18:11:39.923700 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-qk94m" Jan 21 18:11:40 crc kubenswrapper[4792]: I0121 18:11:40.872197 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-v2pzt" Jan 21 18:11:40 crc kubenswrapper[4792]: I0121 18:11:40.949417 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lkzx\" (UniqueName: \"kubernetes.io/projected/34fa69f7-7031-406f-befe-bfaee9844abf-kube-api-access-5lkzx\") pod \"34fa69f7-7031-406f-befe-bfaee9844abf\" (UID: \"34fa69f7-7031-406f-befe-bfaee9844abf\") " Jan 21 18:11:40 crc kubenswrapper[4792]: I0121 18:11:40.949559 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34fa69f7-7031-406f-befe-bfaee9844abf-catalog-content\") pod \"34fa69f7-7031-406f-befe-bfaee9844abf\" (UID: \"34fa69f7-7031-406f-befe-bfaee9844abf\") " Jan 21 18:11:40 crc kubenswrapper[4792]: I0121 18:11:40.949613 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34fa69f7-7031-406f-befe-bfaee9844abf-utilities\") pod \"34fa69f7-7031-406f-befe-bfaee9844abf\" (UID: \"34fa69f7-7031-406f-befe-bfaee9844abf\") " Jan 21 18:11:40 crc kubenswrapper[4792]: I0121 18:11:40.956372 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34fa69f7-7031-406f-befe-bfaee9844abf-utilities" (OuterVolumeSpecName: "utilities") pod "34fa69f7-7031-406f-befe-bfaee9844abf" (UID: "34fa69f7-7031-406f-befe-bfaee9844abf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:11:40 crc kubenswrapper[4792]: I0121 18:11:40.969120 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34fa69f7-7031-406f-befe-bfaee9844abf-kube-api-access-5lkzx" (OuterVolumeSpecName: "kube-api-access-5lkzx") pod "34fa69f7-7031-406f-befe-bfaee9844abf" (UID: "34fa69f7-7031-406f-befe-bfaee9844abf"). InnerVolumeSpecName "kube-api-access-5lkzx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:11:41 crc kubenswrapper[4792]: I0121 18:11:41.052143 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lkzx\" (UniqueName: \"kubernetes.io/projected/34fa69f7-7031-406f-befe-bfaee9844abf-kube-api-access-5lkzx\") on node \"crc\" DevicePath \"\"" Jan 21 18:11:41 crc kubenswrapper[4792]: I0121 18:11:41.052180 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34fa69f7-7031-406f-befe-bfaee9844abf-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:11:41 crc kubenswrapper[4792]: I0121 18:11:41.069368 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v2pzt" event={"ID":"34fa69f7-7031-406f-befe-bfaee9844abf","Type":"ContainerDied","Data":"36a57b92e732e048af9d0fcd83d49953a79b03a7b11a6020f6e95d327ca853b1"} Jan 21 18:11:41 crc kubenswrapper[4792]: I0121 18:11:41.069428 4792 scope.go:117] "RemoveContainer" containerID="7653bc5793b60352c6f69f2c957feff632e4cced20a70e734ef3d569a809a88b" Jan 21 18:11:41 crc kubenswrapper[4792]: I0121 18:11:41.069569 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-v2pzt" Jan 21 18:11:41 crc kubenswrapper[4792]: I0121 18:11:41.101105 4792 scope.go:117] "RemoveContainer" containerID="f5a27d5afe3b662fd0493a04503678de5df955a6593a45630cbae2ab4009ad64" Jan 21 18:11:41 crc kubenswrapper[4792]: I0121 18:11:41.139909 4792 scope.go:117] "RemoveContainer" containerID="4c005c8a0bc1f85f492e16957734df7d6d2c7e670680f9736692837bbcba7e8f" Jan 21 18:11:41 crc kubenswrapper[4792]: I0121 18:11:41.228065 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34fa69f7-7031-406f-befe-bfaee9844abf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "34fa69f7-7031-406f-befe-bfaee9844abf" (UID: "34fa69f7-7031-406f-befe-bfaee9844abf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:11:41 crc kubenswrapper[4792]: I0121 18:11:41.266602 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34fa69f7-7031-406f-befe-bfaee9844abf-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:11:41 crc kubenswrapper[4792]: I0121 18:11:41.444651 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v2pzt"] Jan 21 18:11:41 crc kubenswrapper[4792]: I0121 18:11:41.448656 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-v2pzt"] Jan 21 18:11:41 crc kubenswrapper[4792]: I0121 18:11:41.797308 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-7chrm"] Jan 21 18:11:41 crc kubenswrapper[4792]: I0121 18:11:41.931574 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-qk94m"] Jan 21 18:11:41 crc kubenswrapper[4792]: I0121 18:11:41.962656 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-c5qnh"] Jan 21 18:11:42 crc kubenswrapper[4792]: I0121 18:11:42.122988 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-7chrm" event={"ID":"83aca1d7-0d0d-48b0-9fcf-e63c7643ff01","Type":"ContainerStarted","Data":"24b98d669b5115e2e15d27ff2ee50ba0a67960fd062c7442096d3dea405f8d5d"} Jan 21 18:11:42 crc kubenswrapper[4792]: I0121 18:11:42.130674 4792 generic.go:334] "Generic (PLEG): container finished" podID="5e215ddb-3fd1-4111-bee0-7758814f6916" containerID="e813dd238c7e3b83ac257dfdbb4b88443de3534e0ade3d39fe1dc2ba681a8609" exitCode=0 Jan 21 18:11:42 crc kubenswrapper[4792]: I0121 18:11:42.130729 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv" event={"ID":"5e215ddb-3fd1-4111-bee0-7758814f6916","Type":"ContainerDied","Data":"e813dd238c7e3b83ac257dfdbb4b88443de3534e0ade3d39fe1dc2ba681a8609"} Jan 21 18:11:42 crc kubenswrapper[4792]: I0121 18:11:42.135725 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-c5qnh" event={"ID":"52155773-3679-4730-b9e5-8906156cc494","Type":"ContainerStarted","Data":"22701d117f5ff3ec52959c138c483837fc3308e96e47c30bb8d8319d83853000"} Jan 21 18:11:42 crc kubenswrapper[4792]: I0121 18:11:42.138309 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z7lgv" 
event={"ID":"12ee0724-b2e1-483a-b743-2f2cda1be892","Type":"ContainerStarted","Data":"e14e8732c0eca4646f8dee178e366be955f241564606b5a44ceeb10fc1a2e2e8"} Jan 21 18:11:42 crc kubenswrapper[4792]: I0121 18:11:42.144277 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-qk94m" event={"ID":"86888bdd-6434-4df1-9e87-276f89a48723","Type":"ContainerStarted","Data":"163f5764c2e1607092825baf16ecaef0653356e2f476d4498efe044b716c566e"} Jan 21 18:11:42 crc kubenswrapper[4792]: I0121 18:11:42.169029 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85"] Jan 21 18:11:42 crc kubenswrapper[4792]: I0121 18:11:42.278664 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34fa69f7-7031-406f-befe-bfaee9844abf" path="/var/lib/kubelet/pods/34fa69f7-7031-406f-befe-bfaee9844abf/volumes" Jan 21 18:11:42 crc kubenswrapper[4792]: I0121 18:11:42.300132 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5"] Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.092244 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/elastic-operator-c6c8576bf-2cs9f"] Jan 21 18:11:43 crc kubenswrapper[4792]: E0121 18:11:43.093182 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34fa69f7-7031-406f-befe-bfaee9844abf" containerName="registry-server" Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.093207 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="34fa69f7-7031-406f-befe-bfaee9844abf" containerName="registry-server" Jan 21 18:11:43 crc kubenswrapper[4792]: E0121 18:11:43.093227 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34fa69f7-7031-406f-befe-bfaee9844abf" containerName="extract-content" Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.093235 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="34fa69f7-7031-406f-befe-bfaee9844abf" containerName="extract-content" Jan 21 18:11:43 crc kubenswrapper[4792]: E0121 18:11:43.093262 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34fa69f7-7031-406f-befe-bfaee9844abf" containerName="extract-utilities" Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.093271 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="34fa69f7-7031-406f-befe-bfaee9844abf" containerName="extract-utilities" Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.093403 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="34fa69f7-7031-406f-befe-bfaee9844abf" containerName="registry-server" Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.094064 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/elastic-operator-c6c8576bf-2cs9f" Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.101091 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elastic-operator-service-cert" Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.102147 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"openshift-service-ca.crt" Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.102307 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"kube-root-ca.crt" Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.102464 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elastic-operator-dockercfg-xk2hl" Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.121405 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elastic-operator-c6c8576bf-2cs9f"] Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.201290 4792 generic.go:334] "Generic (PLEG): container finished" podID="12ee0724-b2e1-483a-b743-2f2cda1be892" containerID="e14e8732c0eca4646f8dee178e366be955f241564606b5a44ceeb10fc1a2e2e8" exitCode=0 Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.201443 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z7lgv" event={"ID":"12ee0724-b2e1-483a-b743-2f2cda1be892","Type":"ContainerDied","Data":"e14e8732c0eca4646f8dee178e366be955f241564606b5a44ceeb10fc1a2e2e8"} Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.204110 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85" event={"ID":"cf96d731-c320-48de-9b50-8983d34908e4","Type":"ContainerStarted","Data":"ff3a0aba9dd90786ed1550f1da4ac1c702b2dc64f3b02795944a66ea0753410b"} Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.207304 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5" event={"ID":"ac3337f1-d2aa-4663-8ad8-13a3e1675c57","Type":"ContainerStarted","Data":"47f82908d9734914b52e3a5348da3a7b8f3f856e5ea54383b3f08549b1217d7e"} Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.215793 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4b21aeae-2f5f-4259-9fc7-185b0bbbd98e-apiservice-cert\") pod \"elastic-operator-c6c8576bf-2cs9f\" (UID: \"4b21aeae-2f5f-4259-9fc7-185b0bbbd98e\") " pod="service-telemetry/elastic-operator-c6c8576bf-2cs9f" Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.215943 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5z4b6\" (UniqueName: \"kubernetes.io/projected/4b21aeae-2f5f-4259-9fc7-185b0bbbd98e-kube-api-access-5z4b6\") pod \"elastic-operator-c6c8576bf-2cs9f\" (UID: \"4b21aeae-2f5f-4259-9fc7-185b0bbbd98e\") " pod="service-telemetry/elastic-operator-c6c8576bf-2cs9f" Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.215969 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4b21aeae-2f5f-4259-9fc7-185b0bbbd98e-webhook-cert\") pod \"elastic-operator-c6c8576bf-2cs9f\" (UID: \"4b21aeae-2f5f-4259-9fc7-185b0bbbd98e\") " pod="service-telemetry/elastic-operator-c6c8576bf-2cs9f" Jan 21 
18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.228190 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv" event={"ID":"5e215ddb-3fd1-4111-bee0-7758814f6916","Type":"ContainerStarted","Data":"78ce69327deb86e91acf254ee4e4e7752ac55fe597944ad6183b997bda003d74"} Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.284613 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv" podStartSLOduration=5.290252097 podStartE2EDuration="15.284585836s" podCreationTimestamp="2026-01-21 18:11:28 +0000 UTC" firstStartedPulling="2026-01-21 18:11:30.876758851 +0000 UTC m=+924.858722037" lastFinishedPulling="2026-01-21 18:11:40.87109259 +0000 UTC m=+934.853055776" observedRunningTime="2026-01-21 18:11:43.283069725 +0000 UTC m=+937.265032931" watchObservedRunningTime="2026-01-21 18:11:43.284585836 +0000 UTC m=+937.266549022" Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.317741 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5z4b6\" (UniqueName: \"kubernetes.io/projected/4b21aeae-2f5f-4259-9fc7-185b0bbbd98e-kube-api-access-5z4b6\") pod \"elastic-operator-c6c8576bf-2cs9f\" (UID: \"4b21aeae-2f5f-4259-9fc7-185b0bbbd98e\") " pod="service-telemetry/elastic-operator-c6c8576bf-2cs9f" Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.317808 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4b21aeae-2f5f-4259-9fc7-185b0bbbd98e-webhook-cert\") pod \"elastic-operator-c6c8576bf-2cs9f\" (UID: \"4b21aeae-2f5f-4259-9fc7-185b0bbbd98e\") " pod="service-telemetry/elastic-operator-c6c8576bf-2cs9f" Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.317846 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4b21aeae-2f5f-4259-9fc7-185b0bbbd98e-apiservice-cert\") pod \"elastic-operator-c6c8576bf-2cs9f\" (UID: \"4b21aeae-2f5f-4259-9fc7-185b0bbbd98e\") " pod="service-telemetry/elastic-operator-c6c8576bf-2cs9f" Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.375417 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5z4b6\" (UniqueName: \"kubernetes.io/projected/4b21aeae-2f5f-4259-9fc7-185b0bbbd98e-kube-api-access-5z4b6\") pod \"elastic-operator-c6c8576bf-2cs9f\" (UID: \"4b21aeae-2f5f-4259-9fc7-185b0bbbd98e\") " pod="service-telemetry/elastic-operator-c6c8576bf-2cs9f" Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.451466 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4b21aeae-2f5f-4259-9fc7-185b0bbbd98e-apiservice-cert\") pod \"elastic-operator-c6c8576bf-2cs9f\" (UID: \"4b21aeae-2f5f-4259-9fc7-185b0bbbd98e\") " pod="service-telemetry/elastic-operator-c6c8576bf-2cs9f" Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.455161 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4b21aeae-2f5f-4259-9fc7-185b0bbbd98e-webhook-cert\") pod \"elastic-operator-c6c8576bf-2cs9f\" (UID: \"4b21aeae-2f5f-4259-9fc7-185b0bbbd98e\") " pod="service-telemetry/elastic-operator-c6c8576bf-2cs9f" Jan 21 18:11:43 crc kubenswrapper[4792]: I0121 18:11:43.720961 4792 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="service-telemetry/elastic-operator-c6c8576bf-2cs9f" Jan 21 18:11:44 crc kubenswrapper[4792]: I0121 18:11:44.271672 4792 generic.go:334] "Generic (PLEG): container finished" podID="5e215ddb-3fd1-4111-bee0-7758814f6916" containerID="78ce69327deb86e91acf254ee4e4e7752ac55fe597944ad6183b997bda003d74" exitCode=0 Jan 21 18:11:44 crc kubenswrapper[4792]: I0121 18:11:44.288645 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv" event={"ID":"5e215ddb-3fd1-4111-bee0-7758814f6916","Type":"ContainerDied","Data":"78ce69327deb86e91acf254ee4e4e7752ac55fe597944ad6183b997bda003d74"} Jan 21 18:11:44 crc kubenswrapper[4792]: I0121 18:11:44.289743 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z7lgv" event={"ID":"12ee0724-b2e1-483a-b743-2f2cda1be892","Type":"ContainerStarted","Data":"ba1a5082041eef25688b703f17c169210729353a6dd75d36dc2eda92fe086edf"} Jan 21 18:11:44 crc kubenswrapper[4792]: I0121 18:11:44.361953 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-z7lgv" podStartSLOduration=10.237581127 podStartE2EDuration="13.361934431s" podCreationTimestamp="2026-01-21 18:11:31 +0000 UTC" firstStartedPulling="2026-01-21 18:11:40.582768644 +0000 UTC m=+934.564731830" lastFinishedPulling="2026-01-21 18:11:43.707121948 +0000 UTC m=+937.689085134" observedRunningTime="2026-01-21 18:11:44.360590205 +0000 UTC m=+938.342553401" watchObservedRunningTime="2026-01-21 18:11:44.361934431 +0000 UTC m=+938.343897617" Jan 21 18:11:44 crc kubenswrapper[4792]: I0121 18:11:44.802899 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elastic-operator-c6c8576bf-2cs9f"] Jan 21 18:11:45 crc kubenswrapper[4792]: I0121 18:11:45.292349 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elastic-operator-c6c8576bf-2cs9f" event={"ID":"4b21aeae-2f5f-4259-9fc7-185b0bbbd98e","Type":"ContainerStarted","Data":"a64955d3c46ebc93916a4f9760ccd0cb5cbcbcc908d773d8ae7cdd1fe5d17f12"} Jan 21 18:11:45 crc kubenswrapper[4792]: I0121 18:11:45.729033 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv" Jan 21 18:11:45 crc kubenswrapper[4792]: I0121 18:11:45.879559 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5e215ddb-3fd1-4111-bee0-7758814f6916-bundle\") pod \"5e215ddb-3fd1-4111-bee0-7758814f6916\" (UID: \"5e215ddb-3fd1-4111-bee0-7758814f6916\") " Jan 21 18:11:45 crc kubenswrapper[4792]: I0121 18:11:45.879826 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pn26j\" (UniqueName: \"kubernetes.io/projected/5e215ddb-3fd1-4111-bee0-7758814f6916-kube-api-access-pn26j\") pod \"5e215ddb-3fd1-4111-bee0-7758814f6916\" (UID: \"5e215ddb-3fd1-4111-bee0-7758814f6916\") " Jan 21 18:11:45 crc kubenswrapper[4792]: I0121 18:11:45.879852 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5e215ddb-3fd1-4111-bee0-7758814f6916-util\") pod \"5e215ddb-3fd1-4111-bee0-7758814f6916\" (UID: \"5e215ddb-3fd1-4111-bee0-7758814f6916\") " Jan 21 18:11:45 crc kubenswrapper[4792]: I0121 18:11:45.880913 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e215ddb-3fd1-4111-bee0-7758814f6916-bundle" (OuterVolumeSpecName: "bundle") pod "5e215ddb-3fd1-4111-bee0-7758814f6916" (UID: "5e215ddb-3fd1-4111-bee0-7758814f6916"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:11:45 crc kubenswrapper[4792]: I0121 18:11:45.899054 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e215ddb-3fd1-4111-bee0-7758814f6916-util" (OuterVolumeSpecName: "util") pod "5e215ddb-3fd1-4111-bee0-7758814f6916" (UID: "5e215ddb-3fd1-4111-bee0-7758814f6916"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:11:45 crc kubenswrapper[4792]: I0121 18:11:45.910468 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e215ddb-3fd1-4111-bee0-7758814f6916-kube-api-access-pn26j" (OuterVolumeSpecName: "kube-api-access-pn26j") pod "5e215ddb-3fd1-4111-bee0-7758814f6916" (UID: "5e215ddb-3fd1-4111-bee0-7758814f6916"). InnerVolumeSpecName "kube-api-access-pn26j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:11:45 crc kubenswrapper[4792]: I0121 18:11:45.981612 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pn26j\" (UniqueName: \"kubernetes.io/projected/5e215ddb-3fd1-4111-bee0-7758814f6916-kube-api-access-pn26j\") on node \"crc\" DevicePath \"\"" Jan 21 18:11:45 crc kubenswrapper[4792]: I0121 18:11:45.981666 4792 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5e215ddb-3fd1-4111-bee0-7758814f6916-util\") on node \"crc\" DevicePath \"\"" Jan 21 18:11:45 crc kubenswrapper[4792]: I0121 18:11:45.981687 4792 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5e215ddb-3fd1-4111-bee0-7758814f6916-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 18:11:46 crc kubenswrapper[4792]: I0121 18:11:46.316262 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv" event={"ID":"5e215ddb-3fd1-4111-bee0-7758814f6916","Type":"ContainerDied","Data":"2dd54c192b401cb4a186581c3ab1c0c68ec7bc94900bc4588277491ac29f2804"} Jan 21 18:11:46 crc kubenswrapper[4792]: I0121 18:11:46.316306 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2dd54c192b401cb4a186581c3ab1c0c68ec7bc94900bc4588277491ac29f2804" Jan 21 18:11:46 crc kubenswrapper[4792]: I0121 18:11:46.316385 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv" Jan 21 18:11:52 crc kubenswrapper[4792]: I0121 18:11:52.236844 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-z7lgv" Jan 21 18:11:52 crc kubenswrapper[4792]: I0121 18:11:52.239107 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-z7lgv" Jan 21 18:11:52 crc kubenswrapper[4792]: I0121 18:11:52.293236 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-z7lgv" Jan 21 18:11:52 crc kubenswrapper[4792]: I0121 18:11:52.668308 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-z7lgv" Jan 21 18:11:55 crc kubenswrapper[4792]: I0121 18:11:55.171259 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z7lgv"] Jan 21 18:11:55 crc kubenswrapper[4792]: I0121 18:11:55.417921 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-z7lgv" podUID="12ee0724-b2e1-483a-b743-2f2cda1be892" containerName="registry-server" containerID="cri-o://ba1a5082041eef25688b703f17c169210729353a6dd75d36dc2eda92fe086edf" gracePeriod=2 Jan 21 18:11:56 crc kubenswrapper[4792]: I0121 18:11:56.430413 4792 generic.go:334] "Generic (PLEG): container finished" podID="12ee0724-b2e1-483a-b743-2f2cda1be892" containerID="ba1a5082041eef25688b703f17c169210729353a6dd75d36dc2eda92fe086edf" exitCode=0 Jan 21 18:11:56 crc kubenswrapper[4792]: I0121 18:11:56.430479 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z7lgv" event={"ID":"12ee0724-b2e1-483a-b743-2f2cda1be892","Type":"ContainerDied","Data":"ba1a5082041eef25688b703f17c169210729353a6dd75d36dc2eda92fe086edf"} Jan 21 18:11:59 crc 
Jan 21 18:11:59 crc kubenswrapper[4792]: E0121 18:11:59.102020 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:b5c8526d2ae660fe092dd8a7acf18ec4957d5c265890a222f55396fc2cdaeed8"
Jan 21 18:11:59 crc kubenswrapper[4792]: E0121 18:11:59.102243 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:perses-operator,Image:registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:b5c8526d2ae660fe092dd8a7acf18ec4957d5c265890a222f55396fc2cdaeed8,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{134217728 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openshift-service-ca,ReadOnly:true,MountPath:/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nsxqt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod perses-operator-5bf474d74f-qk94m_openshift-operators(86888bdd-6434-4df1-9e87-276f89a48723): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Jan 21 18:11:59 crc kubenswrapper[4792]: E0121 18:11:59.103531 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/perses-operator-5bf474d74f-qk94m" podUID="86888bdd-6434-4df1-9e87-276f89a48723"
Jan 21 18:11:59 crc kubenswrapper[4792]: I0121 18:11:59.330540 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vdn7p"]
Jan 21 18:11:59 crc kubenswrapper[4792]: E0121 18:11:59.330804 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e215ddb-3fd1-4111-bee0-7758814f6916" containerName="pull"
Jan 21 18:11:59 crc kubenswrapper[4792]: I0121 18:11:59.330818 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e215ddb-3fd1-4111-bee0-7758814f6916" containerName="pull"
Jan 21 18:11:59 crc kubenswrapper[4792]: E0121 18:11:59.330844 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e215ddb-3fd1-4111-bee0-7758814f6916" containerName="extract"
Jan 21 18:11:59 crc kubenswrapper[4792]: I0121 18:11:59.330875 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e215ddb-3fd1-4111-bee0-7758814f6916" containerName="extract"
Jan 21 18:11:59 crc kubenswrapper[4792]: E0121 18:11:59.330884 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e215ddb-3fd1-4111-bee0-7758814f6916" containerName="util"
Jan 21 18:11:59 crc kubenswrapper[4792]: I0121 18:11:59.330890 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e215ddb-3fd1-4111-bee0-7758814f6916" containerName="util"
Jan 21 18:11:59 crc kubenswrapper[4792]: I0121 18:11:59.331016 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e215ddb-3fd1-4111-bee0-7758814f6916" containerName="extract"
Jan 21 18:11:59 crc kubenswrapper[4792]: I0121 18:11:59.331629 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vdn7p"
Jan 21 18:11:59 crc kubenswrapper[4792]: I0121 18:11:59.336182 4792 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-mjk22"
Jan 21 18:11:59 crc kubenswrapper[4792]: I0121 18:11:59.336272 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt"
Jan 21 18:11:59 crc kubenswrapper[4792]: I0121 18:11:59.336351 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt"
Jan 21 18:11:59 crc kubenswrapper[4792]: I0121 18:11:59.361083 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vdn7p"]
Jan 21 18:11:59 crc kubenswrapper[4792]: E0121 18:11:59.460202 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:b5c8526d2ae660fe092dd8a7acf18ec4957d5c265890a222f55396fc2cdaeed8\\\"\"" pod="openshift-operators/perses-operator-5bf474d74f-qk94m" podUID="86888bdd-6434-4df1-9e87-276f89a48723"
Jan 21 18:11:59 crc kubenswrapper[4792]: I0121 18:11:59.491445 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t77xp\" (UniqueName: \"kubernetes.io/projected/a80e010d-2623-42fc-9fcb-cb8c37f697b3-kube-api-access-t77xp\") pod \"cert-manager-operator-controller-manager-5446d6888b-vdn7p\" (UID: \"a80e010d-2623-42fc-9fcb-cb8c37f697b3\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vdn7p"
Jan 21 18:11:59 crc kubenswrapper[4792]: I0121 18:11:59.491511 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/a80e010d-2623-42fc-9fcb-cb8c37f697b3-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-vdn7p\" (UID: \"a80e010d-2623-42fc-9fcb-cb8c37f697b3\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vdn7p"
Jan 21 18:11:59 crc kubenswrapper[4792]: I0121 18:11:59.593420 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/a80e010d-2623-42fc-9fcb-cb8c37f697b3-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-vdn7p\" (UID: \"a80e010d-2623-42fc-9fcb-cb8c37f697b3\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vdn7p"
Jan 21 18:11:59 crc kubenswrapper[4792]: I0121 18:11:59.593624 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t77xp\" (UniqueName: \"kubernetes.io/projected/a80e010d-2623-42fc-9fcb-cb8c37f697b3-kube-api-access-t77xp\") pod \"cert-manager-operator-controller-manager-5446d6888b-vdn7p\" (UID: \"a80e010d-2623-42fc-9fcb-cb8c37f697b3\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vdn7p"
Jan 21 18:11:59 crc kubenswrapper[4792]: I0121 18:11:59.594184 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/a80e010d-2623-42fc-9fcb-cb8c37f697b3-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-vdn7p\" (UID: \"a80e010d-2623-42fc-9fcb-cb8c37f697b3\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vdn7p"
Jan 21 18:11:59 crc kubenswrapper[4792]: I0121 18:11:59.626891 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t77xp\" (UniqueName: \"kubernetes.io/projected/a80e010d-2623-42fc-9fcb-cb8c37f697b3-kube-api-access-t77xp\") pod \"cert-manager-operator-controller-manager-5446d6888b-vdn7p\" (UID: \"a80e010d-2623-42fc-9fcb-cb8c37f697b3\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vdn7p"
Jan 21 18:11:59 crc kubenswrapper[4792]: I0121 18:11:59.651282 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vdn7p"
Jan 21 18:12:02 crc kubenswrapper[4792]: E0121 18:12:02.238213 4792 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ba1a5082041eef25688b703f17c169210729353a6dd75d36dc2eda92fe086edf is running failed: container process not found" containerID="ba1a5082041eef25688b703f17c169210729353a6dd75d36dc2eda92fe086edf" cmd=["grpc_health_probe","-addr=:50051"]
Jan 21 18:12:02 crc kubenswrapper[4792]: E0121 18:12:02.239579 4792 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ba1a5082041eef25688b703f17c169210729353a6dd75d36dc2eda92fe086edf is running failed: container process not found" containerID="ba1a5082041eef25688b703f17c169210729353a6dd75d36dc2eda92fe086edf" cmd=["grpc_health_probe","-addr=:50051"]
Jan 21 18:12:02 crc kubenswrapper[4792]: E0121 18:12:02.240436 4792 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ba1a5082041eef25688b703f17c169210729353a6dd75d36dc2eda92fe086edf is running failed: container process not found" containerID="ba1a5082041eef25688b703f17c169210729353a6dd75d36dc2eda92fe086edf" cmd=["grpc_health_probe","-addr=:50051"]
Jan 21 18:12:02 crc kubenswrapper[4792]: E0121 18:12:02.240474 4792 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ba1a5082041eef25688b703f17c169210729353a6dd75d36dc2eda92fe086edf is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-z7lgv" podUID="12ee0724-b2e1-483a-b743-2f2cda1be892" containerName="registry-server"
Jan 21 18:12:02 crc kubenswrapper[4792]: E0121 18:12:02.288094 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea"
Jan 21 18:12:02 crc kubenswrapper[4792]: E0121 18:12:02.288451 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85_openshift-operators(cf96d731-c320-48de-9b50-8983d34908e4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Jan 21 18:12:02 crc kubenswrapper[4792]: E0121 18:12:02.289910 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85" podUID="cf96d731-c320-48de-9b50-8983d34908e4"
Jan 21 18:12:02 crc kubenswrapper[4792]: E0121 18:12:02.296701 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea"
Jan 21 18:12:02 crc kubenswrapper[4792]: E0121 18:12:02.296969 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5_openshift-operators(ac3337f1-d2aa-4663-8ad8-13a3e1675c57): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Jan 21 18:12:02 crc kubenswrapper[4792]: E0121 18:12:02.299143 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5" podUID="ac3337f1-d2aa-4663-8ad8-13a3e1675c57"
Jan 21 18:12:02 crc kubenswrapper[4792]: E0121 18:12:02.484047 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea\\\"\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5" podUID="ac3337f1-d2aa-4663-8ad8-13a3e1675c57"
Jan 21 18:12:02 crc kubenswrapper[4792]: E0121 18:12:02.484839 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea\\\"\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85" podUID="cf96d731-c320-48de-9b50-8983d34908e4"
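
The failures from 18:11:59 onward all follow the same cycle: "PullImage from image service failed" with a context-canceled RPC error, an "Unhandled Error" dump of the full Container spec from kuberuntime_manager.go, an "Error syncing pod" with ErrImagePull, and then ImagePullBackOff on the retry. A quick way to see which images and pods are affected is to tally those markers; a minimal Python sketch, assuming the log is read line by line as reflowed here (the function and pattern names are illustrative, not kubelet APIs):

    import re
    from collections import Counter

    # The pull-failure entries name the image; the sync-error entries name the pod.
    PULL_FAIL = re.compile(r'"PullImage from image service failed".*image="([^"]+)"')
    SYNC_ERR = re.compile(r'"Error syncing pod, skipping".*pod="([^"]+)"')

    def pull_failure_summary(lines):
        images, pods = Counter(), Counter()
        for line in lines:
            if m := PULL_FAIL.search(line):
                images[m.group(1)] += 1
            elif m := SYNC_ERR.search(line):
                pods[m.group(1)] += 1
        return images, pods

    # On this section, the counters cover the perses, admission-webhook,
    # cluster-observability and prometheus operator images, each canceled
    # mid-copy and then backed off.
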
&Container{Name:operator,Image:registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:2ecf763b02048d2cf4c17967a7b2cacc7afd6af0e963a39579d876f8f4170e3c,Command:[],Args:[--namespace=$(NAMESPACE) --images=perses=$(RELATED_IMAGE_PERSES) --images=alertmanager=$(RELATED_IMAGE_ALERTMANAGER) --images=prometheus=$(RELATED_IMAGE_PROMETHEUS) --images=thanos=$(RELATED_IMAGE_THANOS) --images=ui-dashboards=$(RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN) --images=ui-distributed-tracing=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN) --images=ui-distributed-tracing-pf5=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5) --images=ui-distributed-tracing-pf4=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4) --images=ui-logging=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN) --images=ui-logging-pf4=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4) --images=ui-troubleshooting-panel=$(RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN) --images=ui-monitoring=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN) --images=ui-monitoring-pf5=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5) --images=korrel8r=$(RELATED_IMAGE_KORREL8R) --images=health-analyzer=$(RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER) --openshift.enabled=true],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:RELATED_IMAGE_ALERTMANAGER,Value:registry.redhat.io/cluster-observability-operator/alertmanager-rhel9@sha256:dc62889b883f597de91b5389cc52c84c607247d49a807693be2f688e4703dfc3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS,Value:registry.redhat.io/cluster-observability-operator/prometheus-rhel9@sha256:1b555e21bba7c609111ace4380382a696d9aceeb6e9816bf9023b8f689b6c741,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_THANOS,Value:registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:a223bab813b82d698992490bbb60927f6288a83ba52d539836c250e1471f6d34,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PERSES,Value:registry.redhat.io/cluster-observability-operator/perses-rhel9@sha256:e797cdb47beef40b04da7b6d645bca3dc32e6247003c45b56b38efd9e13bf01c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/dashboards-console-plugin-rhel9@sha256:093d2731ac848ed5fd57356b155a19d3bf7b8db96d95b09c5d0095e143f7254f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-rhel9@sha256:7d662a120305e2528acc7e9142b770b5b6a7f4932ddfcadfa4ac953935124895,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf5-rhel9@sha256:75465aabb0aa427a5c531a8fcde463f6d119afbcc618ebcbf6b7ee9bc8aad160,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf4-rhel9@sha256:dc18c8d6a4a9a0a574a57cc5082c8a9b26023bd6d69b9732892d584c1dfe5070,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-rhel9@sha256:369729978cecdc13c99ef3d179f8eb8a450a4a0cb70b63c27a55a15d1710ba27,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observab
ility-operator/logging-console-plugin-pf4-rhel9@sha256:d8c7a61d147f62b204d5c5f16864386025393453c9a81ea327bbd25d7765d611,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/troubleshooting-panel-console-plugin-rhel9@sha256:b4a6eb1cc118a4334b424614959d8b7f361ddd779b3a72690ca49b0a3f26d9b8,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-rhel9@sha256:21d4fff670893ba4b7fbc528cd49f8b71c8281cede9ef84f0697065bb6a7fc50,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-pf5-rhel9@sha256:12d9dbe297a1c3b9df671f21156992082bc483887d851fafe76e5d17321ff474,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KORREL8R,Value:registry.redhat.io/cluster-observability-operator/korrel8r-rhel9@sha256:e65c37f04f6d76a0cbfe05edb3cddf6a8f14f859ee35cf3aebea8fcb991d2c19,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER,Value:registry.redhat.io/cluster-observability-operator/cluster-health-analyzer-rhel9@sha256:48e4e178c6eeaa9d5dd77a591c185a311b4b4a5caadb7199d48463123e31dc9e,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{400 -3} {} 400m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:observability-operator-tls,ReadOnly:true,MountPath:/etc/tls/private,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jr7jn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod observability-operator-59bdc8b94-c5qnh_openshift-operators(52155773-3679-4730-b9e5-8906156cc494): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 21 18:12:08 crc kubenswrapper[4792]: E0121 18:12:08.592429 4792 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/observability-operator-59bdc8b94-c5qnh" podUID="52155773-3679-4730-b9e5-8906156cc494" Jan 21 18:12:09 crc kubenswrapper[4792]: E0121 18:12:09.486238 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:e7e5f4c5e8ab0ba298ef0295a7137d438a42eb177d9322212cde6ba8f367912a" Jan 21 18:12:09 crc kubenswrapper[4792]: E0121 18:12:09.486709 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:e7e5f4c5e8ab0ba298ef0295a7137d438a42eb177d9322212cde6ba8f367912a,Command:[],Args:[--prometheus-config-reloader=$(RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER) --prometheus-instance-selector=app.kubernetes.io/managed-by=observability-operator --alertmanager-instance-selector=app.kubernetes.io/managed-by=observability-operator --thanos-ruler-instance-selector=app.kubernetes.io/managed-by=observability-operator --watch-referenced-objects-in-all-namespaces=true --disable-unmanaged-prometheus-configuration=true],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:GOGC,Value:30,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER,Value:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:9a2097bc5b2e02bc1703f64c452ce8fe4bc6775b732db930ff4770b76ae4653a,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{157286400 0} {} 150Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-z745s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-68bc856cb9-7chrm_openshift-operators(83aca1d7-0d0d-48b0-9fcf-e63c7643ff01): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 21 18:12:09 crc kubenswrapper[4792]: E0121 18:12:09.487827 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ErrImagePull: \"rpc error: code = 
Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-7chrm" podUID="83aca1d7-0d0d-48b0-9fcf-e63c7643ff01" Jan 21 18:12:09 crc kubenswrapper[4792]: I0121 18:12:09.547190 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z7lgv" Jan 21 18:12:09 crc kubenswrapper[4792]: I0121 18:12:09.548875 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z7lgv" event={"ID":"12ee0724-b2e1-483a-b743-2f2cda1be892","Type":"ContainerDied","Data":"a086ea463fe64516d432a4adc0b645bcba2c9760d57fffff4ea6199f99cd0e27"} Jan 21 18:12:09 crc kubenswrapper[4792]: I0121 18:12:09.548951 4792 scope.go:117] "RemoveContainer" containerID="ba1a5082041eef25688b703f17c169210729353a6dd75d36dc2eda92fe086edf" Jan 21 18:12:09 crc kubenswrapper[4792]: E0121 18:12:09.551533 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:2ecf763b02048d2cf4c17967a7b2cacc7afd6af0e963a39579d876f8f4170e3c\\\"\"" pod="openshift-operators/observability-operator-59bdc8b94-c5qnh" podUID="52155773-3679-4730-b9e5-8906156cc494" Jan 21 18:12:09 crc kubenswrapper[4792]: E0121 18:12:09.551587 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:e7e5f4c5e8ab0ba298ef0295a7137d438a42eb177d9322212cde6ba8f367912a\\\"\"" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-7chrm" podUID="83aca1d7-0d0d-48b0-9fcf-e63c7643ff01" Jan 21 18:12:09 crc kubenswrapper[4792]: I0121 18:12:09.598735 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmlx2\" (UniqueName: \"kubernetes.io/projected/12ee0724-b2e1-483a-b743-2f2cda1be892-kube-api-access-dmlx2\") pod \"12ee0724-b2e1-483a-b743-2f2cda1be892\" (UID: \"12ee0724-b2e1-483a-b743-2f2cda1be892\") " Jan 21 18:12:09 crc kubenswrapper[4792]: I0121 18:12:09.598945 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12ee0724-b2e1-483a-b743-2f2cda1be892-utilities\") pod \"12ee0724-b2e1-483a-b743-2f2cda1be892\" (UID: \"12ee0724-b2e1-483a-b743-2f2cda1be892\") " Jan 21 18:12:09 crc kubenswrapper[4792]: I0121 18:12:09.599079 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12ee0724-b2e1-483a-b743-2f2cda1be892-catalog-content\") pod \"12ee0724-b2e1-483a-b743-2f2cda1be892\" (UID: \"12ee0724-b2e1-483a-b743-2f2cda1be892\") " Jan 21 18:12:09 crc kubenswrapper[4792]: I0121 18:12:09.600870 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12ee0724-b2e1-483a-b743-2f2cda1be892-utilities" (OuterVolumeSpecName: "utilities") pod "12ee0724-b2e1-483a-b743-2f2cda1be892" (UID: "12ee0724-b2e1-483a-b743-2f2cda1be892"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:12:09 crc kubenswrapper[4792]: I0121 18:12:09.616558 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12ee0724-b2e1-483a-b743-2f2cda1be892-kube-api-access-dmlx2" (OuterVolumeSpecName: "kube-api-access-dmlx2") pod "12ee0724-b2e1-483a-b743-2f2cda1be892" (UID: "12ee0724-b2e1-483a-b743-2f2cda1be892"). InnerVolumeSpecName "kube-api-access-dmlx2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:12:09 crc kubenswrapper[4792]: I0121 18:12:09.630103 4792 scope.go:117] "RemoveContainer" containerID="e14e8732c0eca4646f8dee178e366be955f241564606b5a44ceeb10fc1a2e2e8" Jan 21 18:12:09 crc kubenswrapper[4792]: I0121 18:12:09.655665 4792 scope.go:117] "RemoveContainer" containerID="22a67805cab763a85d9e62589684aea644d084ad640c87762de34d8294146ec7" Jan 21 18:12:09 crc kubenswrapper[4792]: I0121 18:12:09.684012 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12ee0724-b2e1-483a-b743-2f2cda1be892-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "12ee0724-b2e1-483a-b743-2f2cda1be892" (UID: "12ee0724-b2e1-483a-b743-2f2cda1be892"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:12:09 crc kubenswrapper[4792]: I0121 18:12:09.700813 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12ee0724-b2e1-483a-b743-2f2cda1be892-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:12:09 crc kubenswrapper[4792]: I0121 18:12:09.700895 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12ee0724-b2e1-483a-b743-2f2cda1be892-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:12:09 crc kubenswrapper[4792]: I0121 18:12:09.700914 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmlx2\" (UniqueName: \"kubernetes.io/projected/12ee0724-b2e1-483a-b743-2f2cda1be892-kube-api-access-dmlx2\") on node \"crc\" DevicePath \"\"" Jan 21 18:12:09 crc kubenswrapper[4792]: I0121 18:12:09.775674 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vdn7p"] Jan 21 18:12:09 crc kubenswrapper[4792]: W0121 18:12:09.778309 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda80e010d_2623_42fc_9fcb_cb8c37f697b3.slice/crio-6611bb0beb190515e2963d2e742ed5ee83dfec05fa17a95fdc84b4de9ade5dff WatchSource:0}: Error finding container 6611bb0beb190515e2963d2e742ed5ee83dfec05fa17a95fdc84b4de9ade5dff: Status 404 returned error can't find the container with id 6611bb0beb190515e2963d2e742ed5ee83dfec05fa17a95fdc84b4de9ade5dff Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.555984 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vdn7p" event={"ID":"a80e010d-2623-42fc-9fcb-cb8c37f697b3","Type":"ContainerStarted","Data":"6611bb0beb190515e2963d2e742ed5ee83dfec05fa17a95fdc84b4de9ade5dff"} Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.557962 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elastic-operator-c6c8576bf-2cs9f" 
event={"ID":"4b21aeae-2f5f-4259-9fc7-185b0bbbd98e","Type":"ContainerStarted","Data":"8cf049cd098fe99b021e9017e3b3608b76a0c67f4bd15b8495954ddcbf237365"} Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.561663 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z7lgv" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.588705 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/elastic-operator-c6c8576bf-2cs9f" podStartSLOduration=2.9383274520000002 podStartE2EDuration="27.588675105s" podCreationTimestamp="2026-01-21 18:11:43 +0000 UTC" firstStartedPulling="2026-01-21 18:11:44.840069735 +0000 UTC m=+938.822032911" lastFinishedPulling="2026-01-21 18:12:09.490417378 +0000 UTC m=+963.472380564" observedRunningTime="2026-01-21 18:12:10.578364873 +0000 UTC m=+964.560328079" watchObservedRunningTime="2026-01-21 18:12:10.588675105 +0000 UTC m=+964.570638291" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.609910 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z7lgv"] Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.619906 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-z7lgv"] Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.836752 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 21 18:12:10 crc kubenswrapper[4792]: E0121 18:12:10.837604 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12ee0724-b2e1-483a-b743-2f2cda1be892" containerName="registry-server" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.837690 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="12ee0724-b2e1-483a-b743-2f2cda1be892" containerName="registry-server" Jan 21 18:12:10 crc kubenswrapper[4792]: E0121 18:12:10.837814 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12ee0724-b2e1-483a-b743-2f2cda1be892" containerName="extract-utilities" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.837976 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="12ee0724-b2e1-483a-b743-2f2cda1be892" containerName="extract-utilities" Jan 21 18:12:10 crc kubenswrapper[4792]: E0121 18:12:10.838094 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12ee0724-b2e1-483a-b743-2f2cda1be892" containerName="extract-content" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.838172 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="12ee0724-b2e1-483a-b743-2f2cda1be892" containerName="extract-content" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.838383 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="12ee0724-b2e1-483a-b743-2f2cda1be892" containerName="registry-server" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.839584 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.843069 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-xpack-file-realm" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.843283 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-remote-ca" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.843419 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-internal-users" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.843676 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-default-es-config" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.843965 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-default-es-transport-certs" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.844234 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"elasticsearch-es-unicast-hosts" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.844413 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-http-certs-internal" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.844586 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"elasticsearch-es-scripts" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.844719 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-dockercfg-qp4vb" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.861113 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.920501 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.920600 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.920642 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.920665 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-config\" (UniqueName: 
\"kubernetes.io/secret/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.920691 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.920837 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.920947 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.921040 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.921081 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.921111 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.921153 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.921226 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.921267 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.921354 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:10 crc kubenswrapper[4792]: I0121 18:12:10.921416 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.022466 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.022532 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.022556 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.022576 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.022615 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-http-certificates\" (UniqueName: 
\"kubernetes.io/secret/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.022634 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.022667 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.022686 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.022709 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.022750 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.022779 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.022799 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.022818 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " 
pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.022839 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.022875 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.023383 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.023383 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.024126 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.024140 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.030069 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.031269 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.033712 4792 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.034088 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.036567 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.038118 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.039292 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.041616 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.043365 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.054543 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.063294 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/ca7f01d9-70c3-477f-81cd-46e32d6fafa1-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"ca7f01d9-70c3-477f-81cd-46e32d6fafa1\") " 
pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.160527 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.526111 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 21 18:12:11 crc kubenswrapper[4792]: I0121 18:12:11.572076 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"ca7f01d9-70c3-477f-81cd-46e32d6fafa1","Type":"ContainerStarted","Data":"6bc77e03bbe8c079a791912ae1c22dfaba43c5f849bd99b2b9f54a50ec12f52f"} Jan 21 18:12:12 crc kubenswrapper[4792]: I0121 18:12:12.258372 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12ee0724-b2e1-483a-b743-2f2cda1be892" path="/var/lib/kubelet/pods/12ee0724-b2e1-483a-b743-2f2cda1be892/volumes" Jan 21 18:12:23 crc kubenswrapper[4792]: I0121 18:12:23.570750 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:12:23 crc kubenswrapper[4792]: I0121 18:12:23.571310 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:12:32 crc kubenswrapper[4792]: I0121 18:12:32.934097 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-7chrm" event={"ID":"83aca1d7-0d0d-48b0-9fcf-e63c7643ff01","Type":"ContainerStarted","Data":"fcf815e25c426f73be5c8f2e36b65b7bc15dcdf184d8ae338f1b6ad8eec473f3"} Jan 21 18:12:32 crc kubenswrapper[4792]: I0121 18:12:32.938256 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vdn7p" event={"ID":"a80e010d-2623-42fc-9fcb-cb8c37f697b3","Type":"ContainerStarted","Data":"40db7a9cb3ce21c69177f8ee7fc756100aef9949462e2f5b446b240198020b3f"} Jan 21 18:12:32 crc kubenswrapper[4792]: I0121 18:12:32.940220 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-c5qnh" event={"ID":"52155773-3679-4730-b9e5-8906156cc494","Type":"ContainerStarted","Data":"9e70b0de6cc63b7471af7723f69adaca9c60e2a6ed796080f3f61bd312d51046"} Jan 21 18:12:32 crc kubenswrapper[4792]: I0121 18:12:32.940782 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-59bdc8b94-c5qnh" Jan 21 18:12:32 crc kubenswrapper[4792]: I0121 18:12:32.943462 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-qk94m" event={"ID":"86888bdd-6434-4df1-9e87-276f89a48723","Type":"ContainerStarted","Data":"ab92d4b63af2d943324315bc4c38c4dce102cdd546ea960e3f0a02b5108d897a"} Jan 21 18:12:32 crc kubenswrapper[4792]: I0121 18:12:32.943876 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5bf474d74f-qk94m" Jan 21 18:12:32 crc kubenswrapper[4792]: I0121 18:12:32.946147 4792 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85" event={"ID":"cf96d731-c320-48de-9b50-8983d34908e4","Type":"ContainerStarted","Data":"af04338391c72dccb6993109b81d5c81dcb3edb2a058470913843052f93eed17"} Jan 21 18:12:32 crc kubenswrapper[4792]: I0121 18:12:32.948993 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"ca7f01d9-70c3-477f-81cd-46e32d6fafa1","Type":"ContainerStarted","Data":"e8fb2b4b0f6d92dbebf920bec35948cb77487be033db7447bd1b5acdc7cb77ec"} Jan 21 18:12:32 crc kubenswrapper[4792]: I0121 18:12:32.951243 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5" event={"ID":"ac3337f1-d2aa-4663-8ad8-13a3e1675c57","Type":"ContainerStarted","Data":"214eba648647296319069beb12acd6b0168ba7604153ff10301362196d54337a"} Jan 21 18:12:32 crc kubenswrapper[4792]: I0121 18:12:32.967518 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-7chrm" podStartSLOduration=4.6312996 podStartE2EDuration="54.967484223s" podCreationTimestamp="2026-01-21 18:11:38 +0000 UTC" firstStartedPulling="2026-01-21 18:11:41.803600868 +0000 UTC m=+935.785564054" lastFinishedPulling="2026-01-21 18:12:32.139785491 +0000 UTC m=+986.121748677" observedRunningTime="2026-01-21 18:12:32.964337799 +0000 UTC m=+986.946300985" watchObservedRunningTime="2026-01-21 18:12:32.967484223 +0000 UTC m=+986.949447409" Jan 21 18:12:33 crc kubenswrapper[4792]: I0121 18:12:33.029626 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85" podStartSLOduration=4.603963542 podStartE2EDuration="54.029600044s" podCreationTimestamp="2026-01-21 18:11:39 +0000 UTC" firstStartedPulling="2026-01-21 18:11:42.208091926 +0000 UTC m=+936.190055112" lastFinishedPulling="2026-01-21 18:12:31.633728428 +0000 UTC m=+985.615691614" observedRunningTime="2026-01-21 18:12:33.023131 +0000 UTC m=+987.005094196" watchObservedRunningTime="2026-01-21 18:12:33.029600044 +0000 UTC m=+987.011563231" Jan 21 18:12:33 crc kubenswrapper[4792]: I0121 18:12:33.050680 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5" podStartSLOduration=13.133162273 podStartE2EDuration="54.050666278s" podCreationTimestamp="2026-01-21 18:11:39 +0000 UTC" firstStartedPulling="2026-01-21 18:11:42.355104742 +0000 UTC m=+936.337067928" lastFinishedPulling="2026-01-21 18:12:23.272608737 +0000 UTC m=+977.254571933" observedRunningTime="2026-01-21 18:12:33.048755769 +0000 UTC m=+987.030718965" watchObservedRunningTime="2026-01-21 18:12:33.050666278 +0000 UTC m=+987.032629464" Jan 21 18:12:33 crc kubenswrapper[4792]: I0121 18:12:33.091994 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-vdn7p" podStartSLOduration=20.601758332 podStartE2EDuration="34.091978815s" podCreationTimestamp="2026-01-21 18:11:59 +0000 UTC" firstStartedPulling="2026-01-21 18:12:09.782349653 +0000 UTC m=+963.764312839" lastFinishedPulling="2026-01-21 18:12:23.272570136 +0000 UTC m=+977.254533322" observedRunningTime="2026-01-21 18:12:33.089355091 +0000 UTC m=+987.071318277" watchObservedRunningTime="2026-01-21 
18:12:33.091978815 +0000 UTC m=+987.073942001" Jan 21 18:12:33 crc kubenswrapper[4792]: I0121 18:12:33.135123 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-59bdc8b94-c5qnh" podStartSLOduration=4.4192032900000005 podStartE2EDuration="54.135100788s" podCreationTimestamp="2026-01-21 18:11:39 +0000 UTC" firstStartedPulling="2026-01-21 18:11:42.003116182 +0000 UTC m=+935.985079368" lastFinishedPulling="2026-01-21 18:12:31.71901368 +0000 UTC m=+985.700976866" observedRunningTime="2026-01-21 18:12:33.129292678 +0000 UTC m=+987.111255864" watchObservedRunningTime="2026-01-21 18:12:33.135100788 +0000 UTC m=+987.117063974" Jan 21 18:12:33 crc kubenswrapper[4792]: I0121 18:12:33.147036 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 21 18:12:33 crc kubenswrapper[4792]: I0121 18:12:33.166826 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-59bdc8b94-c5qnh" Jan 21 18:12:33 crc kubenswrapper[4792]: I0121 18:12:33.174055 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5bf474d74f-qk94m" podStartSLOduration=12.863177188 podStartE2EDuration="54.17403792s" podCreationTimestamp="2026-01-21 18:11:39 +0000 UTC" firstStartedPulling="2026-01-21 18:11:41.96214027 +0000 UTC m=+935.944103456" lastFinishedPulling="2026-01-21 18:12:23.273000992 +0000 UTC m=+977.254964188" observedRunningTime="2026-01-21 18:12:33.17239168 +0000 UTC m=+987.154354866" watchObservedRunningTime="2026-01-21 18:12:33.17403792 +0000 UTC m=+987.156001106" Jan 21 18:12:33 crc kubenswrapper[4792]: I0121 18:12:33.198984 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 21 18:12:35 crc kubenswrapper[4792]: I0121 18:12:35.986634 4792 generic.go:334] "Generic (PLEG): container finished" podID="ca7f01d9-70c3-477f-81cd-46e32d6fafa1" containerID="e8fb2b4b0f6d92dbebf920bec35948cb77487be033db7447bd1b5acdc7cb77ec" exitCode=0 Jan 21 18:12:35 crc kubenswrapper[4792]: I0121 18:12:35.986733 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"ca7f01d9-70c3-477f-81cd-46e32d6fafa1","Type":"ContainerDied","Data":"e8fb2b4b0f6d92dbebf920bec35948cb77487be033db7447bd1b5acdc7cb77ec"} Jan 21 18:12:37 crc kubenswrapper[4792]: I0121 18:12:37.489191 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-twtgw"] Jan 21 18:12:37 crc kubenswrapper[4792]: I0121 18:12:37.490146 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-twtgw" Jan 21 18:12:37 crc kubenswrapper[4792]: I0121 18:12:37.492832 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Jan 21 18:12:37 crc kubenswrapper[4792]: I0121 18:12:37.493012 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Jan 21 18:12:37 crc kubenswrapper[4792]: I0121 18:12:37.494073 4792 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-j9gjj" Jan 21 18:12:37 crc kubenswrapper[4792]: I0121 18:12:37.504488 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-twtgw"] Jan 21 18:12:37 crc kubenswrapper[4792]: I0121 18:12:37.528243 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2ac35a90-e309-483c-8af0-1436820dd8ae-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-twtgw\" (UID: \"2ac35a90-e309-483c-8af0-1436820dd8ae\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-twtgw" Jan 21 18:12:37 crc kubenswrapper[4792]: I0121 18:12:37.528408 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5g9p\" (UniqueName: \"kubernetes.io/projected/2ac35a90-e309-483c-8af0-1436820dd8ae-kube-api-access-b5g9p\") pod \"cert-manager-cainjector-855d9ccff4-twtgw\" (UID: \"2ac35a90-e309-483c-8af0-1436820dd8ae\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-twtgw" Jan 21 18:12:37 crc kubenswrapper[4792]: I0121 18:12:37.629336 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2ac35a90-e309-483c-8af0-1436820dd8ae-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-twtgw\" (UID: \"2ac35a90-e309-483c-8af0-1436820dd8ae\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-twtgw" Jan 21 18:12:37 crc kubenswrapper[4792]: I0121 18:12:37.629477 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5g9p\" (UniqueName: \"kubernetes.io/projected/2ac35a90-e309-483c-8af0-1436820dd8ae-kube-api-access-b5g9p\") pod \"cert-manager-cainjector-855d9ccff4-twtgw\" (UID: \"2ac35a90-e309-483c-8af0-1436820dd8ae\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-twtgw" Jan 21 18:12:37 crc kubenswrapper[4792]: I0121 18:12:37.649735 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2ac35a90-e309-483c-8af0-1436820dd8ae-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-twtgw\" (UID: \"2ac35a90-e309-483c-8af0-1436820dd8ae\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-twtgw" Jan 21 18:12:37 crc kubenswrapper[4792]: I0121 18:12:37.652903 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5g9p\" (UniqueName: \"kubernetes.io/projected/2ac35a90-e309-483c-8af0-1436820dd8ae-kube-api-access-b5g9p\") pod \"cert-manager-cainjector-855d9ccff4-twtgw\" (UID: \"2ac35a90-e309-483c-8af0-1436820dd8ae\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-twtgw" Jan 21 18:12:37 crc kubenswrapper[4792]: I0121 18:12:37.807925 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-twtgw" Jan 21 18:12:38 crc kubenswrapper[4792]: I0121 18:12:38.224970 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-twtgw"] Jan 21 18:12:38 crc kubenswrapper[4792]: W0121 18:12:38.240822 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2ac35a90_e309_483c_8af0_1436820dd8ae.slice/crio-5db62a751352e0fe2a334d376e82a12ad875ed7062af52c682aaa943d7a0074b WatchSource:0}: Error finding container 5db62a751352e0fe2a334d376e82a12ad875ed7062af52c682aaa943d7a0074b: Status 404 returned error can't find the container with id 5db62a751352e0fe2a334d376e82a12ad875ed7062af52c682aaa943d7a0074b Jan 21 18:12:39 crc kubenswrapper[4792]: I0121 18:12:39.006560 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-twtgw" event={"ID":"2ac35a90-e309-483c-8af0-1436820dd8ae","Type":"ContainerStarted","Data":"5db62a751352e0fe2a334d376e82a12ad875ed7062af52c682aaa943d7a0074b"} Jan 21 18:12:39 crc kubenswrapper[4792]: I0121 18:12:39.928029 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5bf474d74f-qk94m" Jan 21 18:12:41 crc kubenswrapper[4792]: I0121 18:12:41.022956 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"ca7f01d9-70c3-477f-81cd-46e32d6fafa1","Type":"ContainerStarted","Data":"268cbcb4d4f1e02f3e3e2fbfe30dcfb03b62f7176846c917ab8d5a7feda4aae1"} Jan 21 18:12:41 crc kubenswrapper[4792]: I0121 18:12:41.990213 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-bkv49"] Jan 21 18:12:41 crc kubenswrapper[4792]: I0121 18:12:41.991017 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-bkv49" Jan 21 18:12:41 crc kubenswrapper[4792]: I0121 18:12:41.993241 4792 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-j6gtn" Jan 21 18:12:42 crc kubenswrapper[4792]: I0121 18:12:42.010365 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-bkv49"] Jan 21 18:12:42 crc kubenswrapper[4792]: I0121 18:12:42.039405 4792 generic.go:334] "Generic (PLEG): container finished" podID="ca7f01d9-70c3-477f-81cd-46e32d6fafa1" containerID="268cbcb4d4f1e02f3e3e2fbfe30dcfb03b62f7176846c917ab8d5a7feda4aae1" exitCode=0 Jan 21 18:12:42 crc kubenswrapper[4792]: I0121 18:12:42.039467 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"ca7f01d9-70c3-477f-81cd-46e32d6fafa1","Type":"ContainerDied","Data":"268cbcb4d4f1e02f3e3e2fbfe30dcfb03b62f7176846c917ab8d5a7feda4aae1"} Jan 21 18:12:42 crc kubenswrapper[4792]: I0121 18:12:42.097631 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-km2gp\" (UniqueName: \"kubernetes.io/projected/3abbfb41-60d7-4691-a79f-c57aa65bd54e-kube-api-access-km2gp\") pod \"cert-manager-webhook-f4fb5df64-bkv49\" (UID: \"3abbfb41-60d7-4691-a79f-c57aa65bd54e\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-bkv49" Jan 21 18:12:42 crc kubenswrapper[4792]: I0121 18:12:42.097939 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3abbfb41-60d7-4691-a79f-c57aa65bd54e-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-bkv49\" (UID: \"3abbfb41-60d7-4691-a79f-c57aa65bd54e\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-bkv49" Jan 21 18:12:42 crc kubenswrapper[4792]: I0121 18:12:42.199392 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3abbfb41-60d7-4691-a79f-c57aa65bd54e-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-bkv49\" (UID: \"3abbfb41-60d7-4691-a79f-c57aa65bd54e\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-bkv49" Jan 21 18:12:42 crc kubenswrapper[4792]: I0121 18:12:42.200036 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-km2gp\" (UniqueName: \"kubernetes.io/projected/3abbfb41-60d7-4691-a79f-c57aa65bd54e-kube-api-access-km2gp\") pod \"cert-manager-webhook-f4fb5df64-bkv49\" (UID: \"3abbfb41-60d7-4691-a79f-c57aa65bd54e\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-bkv49" Jan 21 18:12:42 crc kubenswrapper[4792]: I0121 18:12:42.361226 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-km2gp\" (UniqueName: \"kubernetes.io/projected/3abbfb41-60d7-4691-a79f-c57aa65bd54e-kube-api-access-km2gp\") pod \"cert-manager-webhook-f4fb5df64-bkv49\" (UID: \"3abbfb41-60d7-4691-a79f-c57aa65bd54e\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-bkv49" Jan 21 18:12:42 crc kubenswrapper[4792]: I0121 18:12:42.362561 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3abbfb41-60d7-4691-a79f-c57aa65bd54e-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-bkv49\" (UID: \"3abbfb41-60d7-4691-a79f-c57aa65bd54e\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-bkv49" Jan 21 18:12:42 crc 
kubenswrapper[4792]: I0121 18:12:42.607236 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-bkv49" Jan 21 18:12:43 crc kubenswrapper[4792]: I0121 18:12:43.156716 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-bkv49"] Jan 21 18:12:44 crc kubenswrapper[4792]: I0121 18:12:44.059526 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"ca7f01d9-70c3-477f-81cd-46e32d6fafa1","Type":"ContainerStarted","Data":"2cabb167bcfa009defc1d1900d30c989ac396bdfed36a9d51e5848282aa90362"} Jan 21 18:12:44 crc kubenswrapper[4792]: I0121 18:12:44.063613 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-bkv49" event={"ID":"3abbfb41-60d7-4691-a79f-c57aa65bd54e","Type":"ContainerStarted","Data":"0fc81deed8f4081524856e89b7355f3b4a1b8840cf2237039fcc1fce6470d3bc"} Jan 21 18:12:45 crc kubenswrapper[4792]: I0121 18:12:45.073512 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:12:45 crc kubenswrapper[4792]: I0121 18:12:45.440995 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/elasticsearch-es-default-0" podStartSLOduration=14.769883232 podStartE2EDuration="35.440978382s" podCreationTimestamp="2026-01-21 18:12:10 +0000 UTC" firstStartedPulling="2026-01-21 18:12:11.542657222 +0000 UTC m=+965.524620408" lastFinishedPulling="2026-01-21 18:12:32.213752372 +0000 UTC m=+986.195715558" observedRunningTime="2026-01-21 18:12:45.430050646 +0000 UTC m=+999.412013842" watchObservedRunningTime="2026-01-21 18:12:45.440978382 +0000 UTC m=+999.422941568" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.304710 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-framework-index-1-build"] Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.306463 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.308746 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"service-telemetry-framework-index-dockercfg" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.308821 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-l4qz6" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.308821 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-framework-index-1-sys-config" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.308872 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-framework-index-1-global-ca" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.310591 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-framework-index-1-ca" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.329333 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-index-1-build"] Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.378726 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-proxy-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.378788 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-system-configs\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.378809 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/fb43b2a1-30ee-488c-b6c5-e43874f03605-buildcachedir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.379148 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-container-storage-root\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.379209 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrxwq\" (UniqueName: \"kubernetes.io/projected/fb43b2a1-30ee-488c-b6c5-e43874f03605-kube-api-access-vrxwq\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: 
I0121 18:12:52.379257 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-buildworkdir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.379314 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-l4qz6-push\" (UniqueName: \"kubernetes.io/secret/fb43b2a1-30ee-488c-b6c5-e43874f03605-builder-dockercfg-l4qz6-push\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.379347 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-container-storage-run\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.379407 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/fb43b2a1-30ee-488c-b6c5-e43874f03605-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.379452 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fb43b2a1-30ee-488c-b6c5-e43874f03605-node-pullsecrets\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.379496 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-blob-cache\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.379573 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-l4qz6-pull\" (UniqueName: \"kubernetes.io/secret/fb43b2a1-30ee-488c-b6c5-e43874f03605-builder-dockercfg-l4qz6-pull\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.379716 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: 
\"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.481167 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-l4qz6-pull\" (UniqueName: \"kubernetes.io/secret/fb43b2a1-30ee-488c-b6c5-e43874f03605-builder-dockercfg-l4qz6-pull\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.481251 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.481283 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-proxy-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.481324 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-system-configs\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.481347 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/fb43b2a1-30ee-488c-b6c5-e43874f03605-buildcachedir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.481382 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-container-storage-root\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.481409 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrxwq\" (UniqueName: \"kubernetes.io/projected/fb43b2a1-30ee-488c-b6c5-e43874f03605-kube-api-access-vrxwq\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.481486 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-buildworkdir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " 
pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.481517 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-l4qz6-push\" (UniqueName: \"kubernetes.io/secret/fb43b2a1-30ee-488c-b6c5-e43874f03605-builder-dockercfg-l4qz6-push\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.481545 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-container-storage-run\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.481582 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/fb43b2a1-30ee-488c-b6c5-e43874f03605-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.481613 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fb43b2a1-30ee-488c-b6c5-e43874f03605-node-pullsecrets\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.481641 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-blob-cache\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.483345 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-proxy-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.483876 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-system-configs\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.483939 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/fb43b2a1-30ee-488c-b6c5-e43874f03605-buildcachedir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " 
pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.487136 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.490259 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fb43b2a1-30ee-488c-b6c5-e43874f03605-node-pullsecrets\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.491668 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-container-storage-root\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.494335 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-blob-cache\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.495492 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-buildworkdir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.496589 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-container-storage-run\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.497945 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/fb43b2a1-30ee-488c-b6c5-e43874f03605-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.498522 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-l4qz6-pull\" (UniqueName: \"kubernetes.io/secret/fb43b2a1-30ee-488c-b6c5-e43874f03605-builder-dockercfg-l4qz6-pull\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 
18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.507532 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-l4qz6-push\" (UniqueName: \"kubernetes.io/secret/fb43b2a1-30ee-488c-b6c5-e43874f03605-builder-dockercfg-l4qz6-push\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.509493 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrxwq\" (UniqueName: \"kubernetes.io/projected/fb43b2a1-30ee-488c-b6c5-e43874f03605-kube-api-access-vrxwq\") pod \"service-telemetry-framework-index-1-build\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:52 crc kubenswrapper[4792]: I0121 18:12:52.649591 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:12:53 crc kubenswrapper[4792]: I0121 18:12:53.570737 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:12:53 crc kubenswrapper[4792]: I0121 18:12:53.571125 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:12:56 crc kubenswrapper[4792]: I0121 18:12:56.362248 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="service-telemetry/elasticsearch-es-default-0" podUID="ca7f01d9-70c3-477f-81cd-46e32d6fafa1" containerName="elasticsearch" probeResult="failure" output=< Jan 21 18:12:56 crc kubenswrapper[4792]: {"timestamp": "2026-01-21T18:12:56+00:00", "message": "readiness probe failed", "curl_rc": "7"} Jan 21 18:12:56 crc kubenswrapper[4792]: > Jan 21 18:12:59 crc kubenswrapper[4792]: I0121 18:12:59.868349 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-index-1-build"] Jan 21 18:12:59 crc kubenswrapper[4792]: E0121 18:12:59.894571 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cert-manager/jetstack-cert-manager-rhel9@sha256:29a0fa1c2f2a6cee62a0468a3883d16d491b4af29130dad6e3e2bb2948f274df" Jan 21 18:12:59 crc kubenswrapper[4792]: E0121 18:12:59.894755 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cert-manager-cainjector,Image:registry.redhat.io/cert-manager/jetstack-cert-manager-rhel9@sha256:29a0fa1c2f2a6cee62a0468a3883d16d491b4af29130dad6e3e2bb2948f274df,Command:[/app/cmd/cainjector/cainjector],Args:[--leader-election-namespace=kube-system 
--v=2],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http-metrics,HostPort:0,ContainerPort:9402,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:POD_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:bound-sa-token,ReadOnly:true,MountPath:/var/run/secrets/openshift/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b5g9p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000690000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cert-manager-cainjector-855d9ccff4-twtgw_cert-manager(2ac35a90-e309-483c-8af0-1436820dd8ae): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 21 18:12:59 crc kubenswrapper[4792]: E0121 18:12:59.895828 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cert-manager-cainjector\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="cert-manager/cert-manager-cainjector-855d9ccff4-twtgw" podUID="2ac35a90-e309-483c-8af0-1436820dd8ae" Jan 21 18:13:00 crc kubenswrapper[4792]: I0121 18:13:00.192726 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-bkv49" event={"ID":"3abbfb41-60d7-4691-a79f-c57aa65bd54e","Type":"ContainerStarted","Data":"b845f3c2b58e950f74c13e9831ffaff56bcbbfe482972dcd1e213752d84f6190"} Jan 21 18:13:00 crc kubenswrapper[4792]: I0121 18:13:00.193087 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-bkv49" Jan 21 18:13:00 crc kubenswrapper[4792]: I0121 18:13:00.194428 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"fb43b2a1-30ee-488c-b6c5-e43874f03605","Type":"ContainerStarted","Data":"bbf75271af22af63929390c0c1fae68cf767d095a367f1ddfa4d0d395454ee0b"} Jan 21 18:13:00 crc kubenswrapper[4792]: I0121 18:13:00.212642 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-bkv49" podStartSLOduration=2.456196001 podStartE2EDuration="19.212622371s" podCreationTimestamp="2026-01-21 18:12:41 +0000 UTC" firstStartedPulling="2026-01-21 18:12:43.179333766 +0000 UTC m=+997.161296962" lastFinishedPulling="2026-01-21 18:12:59.935760146 +0000 UTC m=+1013.917723332" observedRunningTime="2026-01-21 18:13:00.208910567 +0000 UTC m=+1014.190873763" 
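[annotation] The pod_startup_latency_tracker entries are internally consistent: podStartSLOduration equals the end-to-end startup duration minus the image-pull window, computed on the monotonic clock (the "m=+..." suffixes). For cert-manager-webhook-f4fb5df64-bkv49 above: pull = 1013.917723332 - 997.161296962 = 16.756426370s, and 19.212622371 - 16.756426370 = 2.456196001s, exactly the logged value. The sketch below reproduces the arithmetic; the formula is inferred from the logged numbers, not quoted from kubelet source.

```go
// Reproduce podStartSLOduration for cert-manager-webhook-f4fb5df64-bkv49
// from the values in the log entry above.
package main

import "fmt"

func main() {
	// Monotonic-clock readings (the "m=+..." suffixes), in seconds.
	firstStartedPulling := 997.161296962
	lastFinishedPulling := 1013.917723332
	e2e := 19.212622371 // podStartE2EDuration

	pull := lastFinishedPulling - firstStartedPulling
	slo := e2e - pull
	fmt.Printf("pull window: %.9fs, SLO duration: %.9fs\n", pull, slo)
	// pull window: 16.756426370s, SLO duration: 2.456196001s, as logged.
}
```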
watchObservedRunningTime="2026-01-21 18:13:00.212622371 +0000 UTC m=+1014.194585557" Jan 21 18:13:01 crc kubenswrapper[4792]: I0121 18:13:01.206185 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-twtgw" event={"ID":"2ac35a90-e309-483c-8af0-1436820dd8ae","Type":"ContainerStarted","Data":"820546238eeb42a47d6903d8c4a2db62ec6ac8429343de75a8459186ba0b829f"} Jan 21 18:13:01 crc kubenswrapper[4792]: I0121 18:13:01.261562 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-twtgw" podStartSLOduration=-9223372012.593239 podStartE2EDuration="24.261537071s" podCreationTimestamp="2026-01-21 18:12:37 +0000 UTC" firstStartedPulling="2026-01-21 18:12:38.243871513 +0000 UTC m=+992.225834699" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:13:01.24854315 +0000 UTC m=+1015.230506336" watchObservedRunningTime="2026-01-21 18:13:01.261537071 +0000 UTC m=+1015.243500257" Jan 21 18:13:01 crc kubenswrapper[4792]: I0121 18:13:01.311058 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="service-telemetry/elasticsearch-es-default-0" podUID="ca7f01d9-70c3-477f-81cd-46e32d6fafa1" containerName="elasticsearch" probeResult="failure" output=< Jan 21 18:13:01 crc kubenswrapper[4792]: {"timestamp": "2026-01-21T18:13:01+00:00", "message": "readiness probe failed", "curl_rc": "7"} Jan 21 18:13:01 crc kubenswrapper[4792]: > Jan 21 18:13:04 crc kubenswrapper[4792]: I0121 18:13:04.795032 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-gct4j"] Jan 21 18:13:04 crc kubenswrapper[4792]: I0121 18:13:04.796167 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-gct4j" Jan 21 18:13:04 crc kubenswrapper[4792]: I0121 18:13:04.798484 4792 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-n58cq" Jan 21 18:13:04 crc kubenswrapper[4792]: I0121 18:13:04.810890 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-gct4j"] Jan 21 18:13:04 crc kubenswrapper[4792]: I0121 18:13:04.842081 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3e2aea4a-8b97-4875-8fc5-b4ad936e1708-bound-sa-token\") pod \"cert-manager-86cb77c54b-gct4j\" (UID: \"3e2aea4a-8b97-4875-8fc5-b4ad936e1708\") " pod="cert-manager/cert-manager-86cb77c54b-gct4j" Jan 21 18:13:04 crc kubenswrapper[4792]: I0121 18:13:04.842178 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twnh6\" (UniqueName: \"kubernetes.io/projected/3e2aea4a-8b97-4875-8fc5-b4ad936e1708-kube-api-access-twnh6\") pod \"cert-manager-86cb77c54b-gct4j\" (UID: \"3e2aea4a-8b97-4875-8fc5-b4ad936e1708\") " pod="cert-manager/cert-manager-86cb77c54b-gct4j" Jan 21 18:13:04 crc kubenswrapper[4792]: I0121 18:13:04.943585 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twnh6\" (UniqueName: \"kubernetes.io/projected/3e2aea4a-8b97-4875-8fc5-b4ad936e1708-kube-api-access-twnh6\") pod \"cert-manager-86cb77c54b-gct4j\" (UID: \"3e2aea4a-8b97-4875-8fc5-b4ad936e1708\") " pod="cert-manager/cert-manager-86cb77c54b-gct4j" Jan 21 18:13:04 crc kubenswrapper[4792]: I0121 18:13:04.943681 4792 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3e2aea4a-8b97-4875-8fc5-b4ad936e1708-bound-sa-token\") pod \"cert-manager-86cb77c54b-gct4j\" (UID: \"3e2aea4a-8b97-4875-8fc5-b4ad936e1708\") " pod="cert-manager/cert-manager-86cb77c54b-gct4j" Jan 21 18:13:04 crc kubenswrapper[4792]: I0121 18:13:04.966202 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twnh6\" (UniqueName: \"kubernetes.io/projected/3e2aea4a-8b97-4875-8fc5-b4ad936e1708-kube-api-access-twnh6\") pod \"cert-manager-86cb77c54b-gct4j\" (UID: \"3e2aea4a-8b97-4875-8fc5-b4ad936e1708\") " pod="cert-manager/cert-manager-86cb77c54b-gct4j" Jan 21 18:13:04 crc kubenswrapper[4792]: I0121 18:13:04.968618 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3e2aea4a-8b97-4875-8fc5-b4ad936e1708-bound-sa-token\") pod \"cert-manager-86cb77c54b-gct4j\" (UID: \"3e2aea4a-8b97-4875-8fc5-b4ad936e1708\") " pod="cert-manager/cert-manager-86cb77c54b-gct4j" Jan 21 18:13:05 crc kubenswrapper[4792]: I0121 18:13:05.117276 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-gct4j" Jan 21 18:13:06 crc kubenswrapper[4792]: I0121 18:13:06.381541 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="service-telemetry/elasticsearch-es-default-0" podUID="ca7f01d9-70c3-477f-81cd-46e32d6fafa1" containerName="elasticsearch" probeResult="failure" output=< Jan 21 18:13:06 crc kubenswrapper[4792]: {"timestamp": "2026-01-21T18:13:06+00:00", "message": "readiness probe failed", "curl_rc": "7"} Jan 21 18:13:06 crc kubenswrapper[4792]: > Jan 21 18:13:07 crc kubenswrapper[4792]: I0121 18:13:07.610410 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-bkv49" Jan 21 18:13:08 crc kubenswrapper[4792]: I0121 18:13:08.894629 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-gct4j"] Jan 21 18:13:08 crc kubenswrapper[4792]: W0121 18:13:08.902167 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e2aea4a_8b97_4875_8fc5_b4ad936e1708.slice/crio-8a819c0d49f904f69d785f4e919e973613976bcd0c97a48c1ced1116b1388c73 WatchSource:0}: Error finding container 8a819c0d49f904f69d785f4e919e973613976bcd0c97a48c1ced1116b1388c73: Status 404 returned error can't find the container with id 8a819c0d49f904f69d785f4e919e973613976bcd0c97a48c1ced1116b1388c73 Jan 21 18:13:09 crc kubenswrapper[4792]: I0121 18:13:09.316466 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"fb43b2a1-30ee-488c-b6c5-e43874f03605","Type":"ContainerStarted","Data":"e5c396b62b98a5b1053f902d05591a0a607dab1696f93a9de14d7d3d0a1e95dc"} Jan 21 18:13:09 crc kubenswrapper[4792]: I0121 18:13:09.318615 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-gct4j" event={"ID":"3e2aea4a-8b97-4875-8fc5-b4ad936e1708","Type":"ContainerStarted","Data":"3d776bde980d6a88372d04c7174980aafa5ef7ba817f5708644242893640ef02"} Jan 21 18:13:09 crc kubenswrapper[4792]: I0121 18:13:09.318675 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-gct4j" 
event={"ID":"3e2aea4a-8b97-4875-8fc5-b4ad936e1708","Type":"ContainerStarted","Data":"8a819c0d49f904f69d785f4e919e973613976bcd0c97a48c1ced1116b1388c73"} Jan 21 18:13:09 crc kubenswrapper[4792]: I0121 18:13:09.375811 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-gct4j" podStartSLOduration=5.375786095 podStartE2EDuration="5.375786095s" podCreationTimestamp="2026-01-21 18:13:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:13:09.371019441 +0000 UTC m=+1023.352982627" watchObservedRunningTime="2026-01-21 18:13:09.375786095 +0000 UTC m=+1023.357749281" Jan 21 18:13:10 crc kubenswrapper[4792]: I0121 18:13:10.327228 4792 generic.go:334] "Generic (PLEG): container finished" podID="fb43b2a1-30ee-488c-b6c5-e43874f03605" containerID="e5c396b62b98a5b1053f902d05591a0a607dab1696f93a9de14d7d3d0a1e95dc" exitCode=0 Jan 21 18:13:10 crc kubenswrapper[4792]: I0121 18:13:10.328393 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"fb43b2a1-30ee-488c-b6c5-e43874f03605","Type":"ContainerDied","Data":"e5c396b62b98a5b1053f902d05591a0a607dab1696f93a9de14d7d3d0a1e95dc"} Jan 21 18:13:11 crc kubenswrapper[4792]: I0121 18:13:11.335641 4792 generic.go:334] "Generic (PLEG): container finished" podID="fb43b2a1-30ee-488c-b6c5-e43874f03605" containerID="c940caee96f5ad39ae7a9e319bd3ef2289c059b421485d060c0167f6b1e5f610" exitCode=0 Jan 21 18:13:11 crc kubenswrapper[4792]: I0121 18:13:11.335713 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"fb43b2a1-30ee-488c-b6c5-e43874f03605","Type":"ContainerDied","Data":"c940caee96f5ad39ae7a9e319bd3ef2289c059b421485d060c0167f6b1e5f610"} Jan 21 18:13:11 crc kubenswrapper[4792]: I0121 18:13:11.420036 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-framework-index-1-build_fb43b2a1-30ee-488c-b6c5-e43874f03605/manage-dockerfile/0.log" Jan 21 18:13:11 crc kubenswrapper[4792]: I0121 18:13:11.583964 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/elasticsearch-es-default-0" Jan 21 18:13:12 crc kubenswrapper[4792]: I0121 18:13:12.348208 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"fb43b2a1-30ee-488c-b6c5-e43874f03605","Type":"ContainerStarted","Data":"64968db206559daba7b258ec68b24b4a32bd34c28f8db83fac0013972a52f5c6"} Jan 21 18:13:12 crc kubenswrapper[4792]: I0121 18:13:12.374909 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-framework-index-1-build" podStartSLOduration=11.569088011 podStartE2EDuration="20.374888831s" podCreationTimestamp="2026-01-21 18:12:52 +0000 UTC" firstStartedPulling="2026-01-21 18:12:59.89696373 +0000 UTC m=+1013.878926916" lastFinishedPulling="2026-01-21 18:13:08.70276455 +0000 UTC m=+1022.684727736" observedRunningTime="2026-01-21 18:13:12.371416395 +0000 UTC m=+1026.353379581" watchObservedRunningTime="2026-01-21 18:13:12.374888831 +0000 UTC m=+1026.356852017" Jan 21 18:13:23 crc kubenswrapper[4792]: I0121 18:13:23.570815 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe 
status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:13:23 crc kubenswrapper[4792]: I0121 18:13:23.571492 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:13:23 crc kubenswrapper[4792]: I0121 18:13:23.571541 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" Jan 21 18:13:23 crc kubenswrapper[4792]: I0121 18:13:23.572367 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"51a7f3d84d23321326653dd420e73da4fa0fbd2379dcc7fa479dc2a2a53c626e"} pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 18:13:23 crc kubenswrapper[4792]: I0121 18:13:23.572431 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" containerID="cri-o://51a7f3d84d23321326653dd420e73da4fa0fbd2379dcc7fa479dc2a2a53c626e" gracePeriod=600 Jan 21 18:13:24 crc kubenswrapper[4792]: I0121 18:13:24.490817 4792 generic.go:334] "Generic (PLEG): container finished" podID="759f2e21-e44e-4049-b262-cb49448e22ab" containerID="51a7f3d84d23321326653dd420e73da4fa0fbd2379dcc7fa479dc2a2a53c626e" exitCode=0 Jan 21 18:13:24 crc kubenswrapper[4792]: I0121 18:13:24.490898 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerDied","Data":"51a7f3d84d23321326653dd420e73da4fa0fbd2379dcc7fa479dc2a2a53c626e"} Jan 21 18:13:24 crc kubenswrapper[4792]: I0121 18:13:24.491276 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerStarted","Data":"81f61055a15b6c0de20c4bbce1d77d69823c921d5086b61e151446d2bb275b02"} Jan 21 18:13:24 crc kubenswrapper[4792]: I0121 18:13:24.491359 4792 scope.go:117] "RemoveContainer" containerID="db9d47c638475d5f4679b8511694d303d4101605ef33cf9eed866432ca6cd998" Jan 21 18:13:30 crc kubenswrapper[4792]: I0121 18:13:30.876824 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ztfzm"] Jan 21 18:13:30 crc kubenswrapper[4792]: I0121 18:13:30.879338 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ztfzm" Jan 21 18:13:30 crc kubenswrapper[4792]: I0121 18:13:30.894640 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ztfzm"] Jan 21 18:13:31 crc kubenswrapper[4792]: I0121 18:13:31.011284 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6590a31-f715-4d81-840c-c01257f00fe3-catalog-content\") pod \"community-operators-ztfzm\" (UID: \"b6590a31-f715-4d81-840c-c01257f00fe3\") " pod="openshift-marketplace/community-operators-ztfzm" Jan 21 18:13:31 crc kubenswrapper[4792]: I0121 18:13:31.011346 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ls7x\" (UniqueName: \"kubernetes.io/projected/b6590a31-f715-4d81-840c-c01257f00fe3-kube-api-access-6ls7x\") pod \"community-operators-ztfzm\" (UID: \"b6590a31-f715-4d81-840c-c01257f00fe3\") " pod="openshift-marketplace/community-operators-ztfzm" Jan 21 18:13:31 crc kubenswrapper[4792]: I0121 18:13:31.011519 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6590a31-f715-4d81-840c-c01257f00fe3-utilities\") pod \"community-operators-ztfzm\" (UID: \"b6590a31-f715-4d81-840c-c01257f00fe3\") " pod="openshift-marketplace/community-operators-ztfzm" Jan 21 18:13:31 crc kubenswrapper[4792]: I0121 18:13:31.113012 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6590a31-f715-4d81-840c-c01257f00fe3-utilities\") pod \"community-operators-ztfzm\" (UID: \"b6590a31-f715-4d81-840c-c01257f00fe3\") " pod="openshift-marketplace/community-operators-ztfzm" Jan 21 18:13:31 crc kubenswrapper[4792]: I0121 18:13:31.113143 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6590a31-f715-4d81-840c-c01257f00fe3-catalog-content\") pod \"community-operators-ztfzm\" (UID: \"b6590a31-f715-4d81-840c-c01257f00fe3\") " pod="openshift-marketplace/community-operators-ztfzm" Jan 21 18:13:31 crc kubenswrapper[4792]: I0121 18:13:31.113166 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ls7x\" (UniqueName: \"kubernetes.io/projected/b6590a31-f715-4d81-840c-c01257f00fe3-kube-api-access-6ls7x\") pod \"community-operators-ztfzm\" (UID: \"b6590a31-f715-4d81-840c-c01257f00fe3\") " pod="openshift-marketplace/community-operators-ztfzm" Jan 21 18:13:31 crc kubenswrapper[4792]: I0121 18:13:31.113621 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6590a31-f715-4d81-840c-c01257f00fe3-utilities\") pod \"community-operators-ztfzm\" (UID: \"b6590a31-f715-4d81-840c-c01257f00fe3\") " pod="openshift-marketplace/community-operators-ztfzm" Jan 21 18:13:31 crc kubenswrapper[4792]: I0121 18:13:31.113871 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6590a31-f715-4d81-840c-c01257f00fe3-catalog-content\") pod \"community-operators-ztfzm\" (UID: \"b6590a31-f715-4d81-840c-c01257f00fe3\") " pod="openshift-marketplace/community-operators-ztfzm" Jan 21 18:13:31 crc kubenswrapper[4792]: I0121 18:13:31.135729 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-6ls7x\" (UniqueName: \"kubernetes.io/projected/b6590a31-f715-4d81-840c-c01257f00fe3-kube-api-access-6ls7x\") pod \"community-operators-ztfzm\" (UID: \"b6590a31-f715-4d81-840c-c01257f00fe3\") " pod="openshift-marketplace/community-operators-ztfzm" Jan 21 18:13:31 crc kubenswrapper[4792]: I0121 18:13:31.206497 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ztfzm" Jan 21 18:13:32 crc kubenswrapper[4792]: I0121 18:13:32.016096 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ztfzm"] Jan 21 18:13:32 crc kubenswrapper[4792]: I0121 18:13:32.569178 4792 generic.go:334] "Generic (PLEG): container finished" podID="b6590a31-f715-4d81-840c-c01257f00fe3" containerID="9ca5f73a73a415951db13d69acaf8b6b28b61498d9a4b8c698dab09477dab513" exitCode=0 Jan 21 18:13:32 crc kubenswrapper[4792]: I0121 18:13:32.569282 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ztfzm" event={"ID":"b6590a31-f715-4d81-840c-c01257f00fe3","Type":"ContainerDied","Data":"9ca5f73a73a415951db13d69acaf8b6b28b61498d9a4b8c698dab09477dab513"} Jan 21 18:13:32 crc kubenswrapper[4792]: I0121 18:13:32.569639 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ztfzm" event={"ID":"b6590a31-f715-4d81-840c-c01257f00fe3","Type":"ContainerStarted","Data":"e26436fbbd8fb33373bde95fea9c2ee5b49b7450448f4772b028d47fc67a42c8"} Jan 21 18:13:33 crc kubenswrapper[4792]: I0121 18:13:33.578104 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ztfzm" event={"ID":"b6590a31-f715-4d81-840c-c01257f00fe3","Type":"ContainerStarted","Data":"5b09aaf161a6fb31b1148a1852ceff77ed8ac5939f912380782315a8ca303ef3"} Jan 21 18:13:34 crc kubenswrapper[4792]: I0121 18:13:34.692321 4792 generic.go:334] "Generic (PLEG): container finished" podID="b6590a31-f715-4d81-840c-c01257f00fe3" containerID="5b09aaf161a6fb31b1148a1852ceff77ed8ac5939f912380782315a8ca303ef3" exitCode=0 Jan 21 18:13:34 crc kubenswrapper[4792]: I0121 18:13:34.692381 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ztfzm" event={"ID":"b6590a31-f715-4d81-840c-c01257f00fe3","Type":"ContainerDied","Data":"5b09aaf161a6fb31b1148a1852ceff77ed8ac5939f912380782315a8ca303ef3"} Jan 21 18:13:35 crc kubenswrapper[4792]: I0121 18:13:35.701430 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ztfzm" event={"ID":"b6590a31-f715-4d81-840c-c01257f00fe3","Type":"ContainerStarted","Data":"891d9eaf72b14116cfa6a0f2f35cb0ba6b6dea68d2e6d270ce9929830a3bd77f"} Jan 21 18:13:35 crc kubenswrapper[4792]: I0121 18:13:35.721868 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ztfzm" podStartSLOduration=3.140748619 podStartE2EDuration="5.721830944s" podCreationTimestamp="2026-01-21 18:13:30 +0000 UTC" firstStartedPulling="2026-01-21 18:13:32.570876193 +0000 UTC m=+1046.552839379" lastFinishedPulling="2026-01-21 18:13:35.151958518 +0000 UTC m=+1049.133921704" observedRunningTime="2026-01-21 18:13:35.718086588 +0000 UTC m=+1049.700049784" watchObservedRunningTime="2026-01-21 18:13:35.721830944 +0000 UTC m=+1049.703794130" Jan 21 18:13:41 crc kubenswrapper[4792]: I0121 18:13:41.207447 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-ztfzm" Jan 21 18:13:41 crc kubenswrapper[4792]: I0121 18:13:41.208318 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ztfzm" Jan 21 18:13:41 crc kubenswrapper[4792]: I0121 18:13:41.262546 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ztfzm" Jan 21 18:13:41 crc kubenswrapper[4792]: I0121 18:13:41.797243 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ztfzm" Jan 21 18:13:41 crc kubenswrapper[4792]: I0121 18:13:41.928408 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ztfzm"] Jan 21 18:13:43 crc kubenswrapper[4792]: I0121 18:13:43.783141 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ztfzm" podUID="b6590a31-f715-4d81-840c-c01257f00fe3" containerName="registry-server" containerID="cri-o://891d9eaf72b14116cfa6a0f2f35cb0ba6b6dea68d2e6d270ce9929830a3bd77f" gracePeriod=2 Jan 21 18:13:44 crc kubenswrapper[4792]: I0121 18:13:44.815773 4792 generic.go:334] "Generic (PLEG): container finished" podID="b6590a31-f715-4d81-840c-c01257f00fe3" containerID="891d9eaf72b14116cfa6a0f2f35cb0ba6b6dea68d2e6d270ce9929830a3bd77f" exitCode=0 Jan 21 18:13:44 crc kubenswrapper[4792]: I0121 18:13:44.815877 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ztfzm" event={"ID":"b6590a31-f715-4d81-840c-c01257f00fe3","Type":"ContainerDied","Data":"891d9eaf72b14116cfa6a0f2f35cb0ba6b6dea68d2e6d270ce9929830a3bd77f"} Jan 21 18:13:46 crc kubenswrapper[4792]: I0121 18:13:46.108791 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ztfzm" Jan 21 18:13:46 crc kubenswrapper[4792]: I0121 18:13:46.222000 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6590a31-f715-4d81-840c-c01257f00fe3-utilities\") pod \"b6590a31-f715-4d81-840c-c01257f00fe3\" (UID: \"b6590a31-f715-4d81-840c-c01257f00fe3\") " Jan 21 18:13:46 crc kubenswrapper[4792]: I0121 18:13:46.222635 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6590a31-f715-4d81-840c-c01257f00fe3-catalog-content\") pod \"b6590a31-f715-4d81-840c-c01257f00fe3\" (UID: \"b6590a31-f715-4d81-840c-c01257f00fe3\") " Jan 21 18:13:46 crc kubenswrapper[4792]: I0121 18:13:46.222725 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ls7x\" (UniqueName: \"kubernetes.io/projected/b6590a31-f715-4d81-840c-c01257f00fe3-kube-api-access-6ls7x\") pod \"b6590a31-f715-4d81-840c-c01257f00fe3\" (UID: \"b6590a31-f715-4d81-840c-c01257f00fe3\") " Jan 21 18:13:46 crc kubenswrapper[4792]: I0121 18:13:46.222974 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6590a31-f715-4d81-840c-c01257f00fe3-utilities" (OuterVolumeSpecName: "utilities") pod "b6590a31-f715-4d81-840c-c01257f00fe3" (UID: "b6590a31-f715-4d81-840c-c01257f00fe3"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:13:46 crc kubenswrapper[4792]: I0121 18:13:46.223233 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6590a31-f715-4d81-840c-c01257f00fe3-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:13:46 crc kubenswrapper[4792]: I0121 18:13:46.229453 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6590a31-f715-4d81-840c-c01257f00fe3-kube-api-access-6ls7x" (OuterVolumeSpecName: "kube-api-access-6ls7x") pod "b6590a31-f715-4d81-840c-c01257f00fe3" (UID: "b6590a31-f715-4d81-840c-c01257f00fe3"). InnerVolumeSpecName "kube-api-access-6ls7x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:13:46 crc kubenswrapper[4792]: I0121 18:13:46.278673 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6590a31-f715-4d81-840c-c01257f00fe3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b6590a31-f715-4d81-840c-c01257f00fe3" (UID: "b6590a31-f715-4d81-840c-c01257f00fe3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:13:46 crc kubenswrapper[4792]: I0121 18:13:46.324149 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6590a31-f715-4d81-840c-c01257f00fe3-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:13:46 crc kubenswrapper[4792]: I0121 18:13:46.324201 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ls7x\" (UniqueName: \"kubernetes.io/projected/b6590a31-f715-4d81-840c-c01257f00fe3-kube-api-access-6ls7x\") on node \"crc\" DevicePath \"\"" Jan 21 18:13:46 crc kubenswrapper[4792]: I0121 18:13:46.835904 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ztfzm" event={"ID":"b6590a31-f715-4d81-840c-c01257f00fe3","Type":"ContainerDied","Data":"e26436fbbd8fb33373bde95fea9c2ee5b49b7450448f4772b028d47fc67a42c8"} Jan 21 18:13:46 crc kubenswrapper[4792]: I0121 18:13:46.835987 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ztfzm" Jan 21 18:13:46 crc kubenswrapper[4792]: I0121 18:13:46.835994 4792 scope.go:117] "RemoveContainer" containerID="891d9eaf72b14116cfa6a0f2f35cb0ba6b6dea68d2e6d270ce9929830a3bd77f" Jan 21 18:13:46 crc kubenswrapper[4792]: I0121 18:13:46.855756 4792 scope.go:117] "RemoveContainer" containerID="5b09aaf161a6fb31b1148a1852ceff77ed8ac5939f912380782315a8ca303ef3" Jan 21 18:13:46 crc kubenswrapper[4792]: I0121 18:13:46.886475 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ztfzm"] Jan 21 18:13:46 crc kubenswrapper[4792]: I0121 18:13:46.892082 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ztfzm"] Jan 21 18:13:46 crc kubenswrapper[4792]: I0121 18:13:46.910307 4792 scope.go:117] "RemoveContainer" containerID="9ca5f73a73a415951db13d69acaf8b6b28b61498d9a4b8c698dab09477dab513" Jan 21 18:13:48 crc kubenswrapper[4792]: I0121 18:13:48.265114 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6590a31-f715-4d81-840c-c01257f00fe3" path="/var/lib/kubelet/pods/b6590a31-f715-4d81-840c-c01257f00fe3/volumes" Jan 21 18:14:08 crc kubenswrapper[4792]: I0121 18:14:08.105407 4792 generic.go:334] "Generic (PLEG): container finished" podID="fb43b2a1-30ee-488c-b6c5-e43874f03605" containerID="64968db206559daba7b258ec68b24b4a32bd34c28f8db83fac0013972a52f5c6" exitCode=0 Jan 21 18:14:08 crc kubenswrapper[4792]: I0121 18:14:08.105464 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"fb43b2a1-30ee-488c-b6c5-e43874f03605","Type":"ContainerDied","Data":"64968db206559daba7b258ec68b24b4a32bd34c28f8db83fac0013972a52f5c6"} Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.447147 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.581570 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-ca-bundles\") pod \"fb43b2a1-30ee-488c-b6c5-e43874f03605\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.581652 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-l4qz6-push\" (UniqueName: \"kubernetes.io/secret/fb43b2a1-30ee-488c-b6c5-e43874f03605-builder-dockercfg-l4qz6-push\") pod \"fb43b2a1-30ee-488c-b6c5-e43874f03605\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.581714 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/fb43b2a1-30ee-488c-b6c5-e43874f03605-buildcachedir\") pod \"fb43b2a1-30ee-488c-b6c5-e43874f03605\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.581745 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vrxwq\" (UniqueName: \"kubernetes.io/projected/fb43b2a1-30ee-488c-b6c5-e43874f03605-kube-api-access-vrxwq\") pod \"fb43b2a1-30ee-488c-b6c5-e43874f03605\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.581774 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-container-storage-run\") pod \"fb43b2a1-30ee-488c-b6c5-e43874f03605\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.581799 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-blob-cache\") pod \"fb43b2a1-30ee-488c-b6c5-e43874f03605\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.581863 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-buildworkdir\") pod \"fb43b2a1-30ee-488c-b6c5-e43874f03605\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.581944 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-l4qz6-pull\" (UniqueName: \"kubernetes.io/secret/fb43b2a1-30ee-488c-b6c5-e43874f03605-builder-dockercfg-l4qz6-pull\") pod \"fb43b2a1-30ee-488c-b6c5-e43874f03605\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.582023 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-container-storage-root\") pod \"fb43b2a1-30ee-488c-b6c5-e43874f03605\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.582110 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: 
\"kubernetes.io/host-path/fb43b2a1-30ee-488c-b6c5-e43874f03605-node-pullsecrets\") pod \"fb43b2a1-30ee-488c-b6c5-e43874f03605\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.582129 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-system-configs\") pod \"fb43b2a1-30ee-488c-b6c5-e43874f03605\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.582150 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-proxy-ca-bundles\") pod \"fb43b2a1-30ee-488c-b6c5-e43874f03605\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.582176 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/fb43b2a1-30ee-488c-b6c5-e43874f03605-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"fb43b2a1-30ee-488c-b6c5-e43874f03605\" (UID: \"fb43b2a1-30ee-488c-b6c5-e43874f03605\") " Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.582917 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "fb43b2a1-30ee-488c-b6c5-e43874f03605" (UID: "fb43b2a1-30ee-488c-b6c5-e43874f03605"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.583312 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb43b2a1-30ee-488c-b6c5-e43874f03605-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "fb43b2a1-30ee-488c-b6c5-e43874f03605" (UID: "fb43b2a1-30ee-488c-b6c5-e43874f03605"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.583388 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "fb43b2a1-30ee-488c-b6c5-e43874f03605" (UID: "fb43b2a1-30ee-488c-b6c5-e43874f03605"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.583915 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb43b2a1-30ee-488c-b6c5-e43874f03605-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "fb43b2a1-30ee-488c-b6c5-e43874f03605" (UID: "fb43b2a1-30ee-488c-b6c5-e43874f03605"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.584609 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "fb43b2a1-30ee-488c-b6c5-e43874f03605" (UID: "fb43b2a1-30ee-488c-b6c5-e43874f03605"). InnerVolumeSpecName "build-proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.584740 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "fb43b2a1-30ee-488c-b6c5-e43874f03605" (UID: "fb43b2a1-30ee-488c-b6c5-e43874f03605"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.585200 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "fb43b2a1-30ee-488c-b6c5-e43874f03605" (UID: "fb43b2a1-30ee-488c-b6c5-e43874f03605"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.589990 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb43b2a1-30ee-488c-b6c5-e43874f03605-builder-dockercfg-l4qz6-pull" (OuterVolumeSpecName: "builder-dockercfg-l4qz6-pull") pod "fb43b2a1-30ee-488c-b6c5-e43874f03605" (UID: "fb43b2a1-30ee-488c-b6c5-e43874f03605"). InnerVolumeSpecName "builder-dockercfg-l4qz6-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.590072 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb43b2a1-30ee-488c-b6c5-e43874f03605-service-telemetry-framework-index-dockercfg-user-build-volume" (OuterVolumeSpecName: "service-telemetry-framework-index-dockercfg-user-build-volume") pod "fb43b2a1-30ee-488c-b6c5-e43874f03605" (UID: "fb43b2a1-30ee-488c-b6c5-e43874f03605"). InnerVolumeSpecName "service-telemetry-framework-index-dockercfg-user-build-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.590990 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb43b2a1-30ee-488c-b6c5-e43874f03605-builder-dockercfg-l4qz6-push" (OuterVolumeSpecName: "builder-dockercfg-l4qz6-push") pod "fb43b2a1-30ee-488c-b6c5-e43874f03605" (UID: "fb43b2a1-30ee-488c-b6c5-e43874f03605"). InnerVolumeSpecName "builder-dockercfg-l4qz6-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.594203 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb43b2a1-30ee-488c-b6c5-e43874f03605-kube-api-access-vrxwq" (OuterVolumeSpecName: "kube-api-access-vrxwq") pod "fb43b2a1-30ee-488c-b6c5-e43874f03605" (UID: "fb43b2a1-30ee-488c-b6c5-e43874f03605"). InnerVolumeSpecName "kube-api-access-vrxwq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.684485 4792 reconciler_common.go:293] "Volume detached for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/fb43b2a1-30ee-488c-b6c5-e43874f03605-service-telemetry-framework-index-dockercfg-user-build-volume\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.684532 4792 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.684549 4792 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-l4qz6-push\" (UniqueName: \"kubernetes.io/secret/fb43b2a1-30ee-488c-b6c5-e43874f03605-builder-dockercfg-l4qz6-push\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.684564 4792 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/fb43b2a1-30ee-488c-b6c5-e43874f03605-buildcachedir\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.684576 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vrxwq\" (UniqueName: \"kubernetes.io/projected/fb43b2a1-30ee-488c-b6c5-e43874f03605-kube-api-access-vrxwq\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.684587 4792 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-container-storage-run\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.684598 4792 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-buildworkdir\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.684609 4792 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-l4qz6-pull\" (UniqueName: \"kubernetes.io/secret/fb43b2a1-30ee-488c-b6c5-e43874f03605-builder-dockercfg-l4qz6-pull\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.684620 4792 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fb43b2a1-30ee-488c-b6c5-e43874f03605-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.684632 4792 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-system-configs\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.684644 4792 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.796745 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "fb43b2a1-30ee-488c-b6c5-e43874f03605" (UID: "fb43b2a1-30ee-488c-b6c5-e43874f03605"). 
InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:14:09 crc kubenswrapper[4792]: I0121 18:14:09.889631 4792 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-build-blob-cache\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:10 crc kubenswrapper[4792]: I0121 18:14:10.131222 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"fb43b2a1-30ee-488c-b6c5-e43874f03605","Type":"ContainerDied","Data":"bbf75271af22af63929390c0c1fae68cf767d095a367f1ddfa4d0d395454ee0b"} Jan 21 18:14:10 crc kubenswrapper[4792]: I0121 18:14:10.131293 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bbf75271af22af63929390c0c1fae68cf767d095a367f1ddfa4d0d395454ee0b" Jan 21 18:14:10 crc kubenswrapper[4792]: I0121 18:14:10.131321 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 21 18:14:10 crc kubenswrapper[4792]: I0121 18:14:10.953865 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-framework-operators-2899d"] Jan 21 18:14:10 crc kubenswrapper[4792]: E0121 18:14:10.954433 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb43b2a1-30ee-488c-b6c5-e43874f03605" containerName="git-clone" Jan 21 18:14:10 crc kubenswrapper[4792]: I0121 18:14:10.954448 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb43b2a1-30ee-488c-b6c5-e43874f03605" containerName="git-clone" Jan 21 18:14:10 crc kubenswrapper[4792]: E0121 18:14:10.954459 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6590a31-f715-4d81-840c-c01257f00fe3" containerName="extract-content" Jan 21 18:14:10 crc kubenswrapper[4792]: I0121 18:14:10.954466 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6590a31-f715-4d81-840c-c01257f00fe3" containerName="extract-content" Jan 21 18:14:10 crc kubenswrapper[4792]: E0121 18:14:10.954474 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb43b2a1-30ee-488c-b6c5-e43874f03605" containerName="manage-dockerfile" Jan 21 18:14:10 crc kubenswrapper[4792]: I0121 18:14:10.954482 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb43b2a1-30ee-488c-b6c5-e43874f03605" containerName="manage-dockerfile" Jan 21 18:14:10 crc kubenswrapper[4792]: E0121 18:14:10.954492 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6590a31-f715-4d81-840c-c01257f00fe3" containerName="registry-server" Jan 21 18:14:10 crc kubenswrapper[4792]: I0121 18:14:10.954499 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6590a31-f715-4d81-840c-c01257f00fe3" containerName="registry-server" Jan 21 18:14:10 crc kubenswrapper[4792]: E0121 18:14:10.954714 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb43b2a1-30ee-488c-b6c5-e43874f03605" containerName="docker-build" Jan 21 18:14:10 crc kubenswrapper[4792]: I0121 18:14:10.954726 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb43b2a1-30ee-488c-b6c5-e43874f03605" containerName="docker-build" Jan 21 18:14:10 crc kubenswrapper[4792]: E0121 18:14:10.954738 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6590a31-f715-4d81-840c-c01257f00fe3" containerName="extract-utilities" Jan 21 18:14:10 crc kubenswrapper[4792]: I0121 18:14:10.954746 
4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6590a31-f715-4d81-840c-c01257f00fe3" containerName="extract-utilities" Jan 21 18:14:10 crc kubenswrapper[4792]: I0121 18:14:10.954880 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb43b2a1-30ee-488c-b6c5-e43874f03605" containerName="docker-build" Jan 21 18:14:10 crc kubenswrapper[4792]: I0121 18:14:10.954896 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6590a31-f715-4d81-840c-c01257f00fe3" containerName="registry-server" Jan 21 18:14:10 crc kubenswrapper[4792]: I0121 18:14:10.955408 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-2899d" Jan 21 18:14:10 crc kubenswrapper[4792]: I0121 18:14:10.961958 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"service-telemetry-framework-operators-dockercfg-fhsf4" Jan 21 18:14:10 crc kubenswrapper[4792]: I0121 18:14:10.964637 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-2899d"] Jan 21 18:14:11 crc kubenswrapper[4792]: I0121 18:14:11.108233 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdbpj\" (UniqueName: \"kubernetes.io/projected/b25169cb-ffc8-4717-89cf-4049e6790b11-kube-api-access-hdbpj\") pod \"service-telemetry-framework-operators-2899d\" (UID: \"b25169cb-ffc8-4717-89cf-4049e6790b11\") " pod="service-telemetry/service-telemetry-framework-operators-2899d" Jan 21 18:14:11 crc kubenswrapper[4792]: I0121 18:14:11.210241 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdbpj\" (UniqueName: \"kubernetes.io/projected/b25169cb-ffc8-4717-89cf-4049e6790b11-kube-api-access-hdbpj\") pod \"service-telemetry-framework-operators-2899d\" (UID: \"b25169cb-ffc8-4717-89cf-4049e6790b11\") " pod="service-telemetry/service-telemetry-framework-operators-2899d" Jan 21 18:14:11 crc kubenswrapper[4792]: I0121 18:14:11.247475 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdbpj\" (UniqueName: \"kubernetes.io/projected/b25169cb-ffc8-4717-89cf-4049e6790b11-kube-api-access-hdbpj\") pod \"service-telemetry-framework-operators-2899d\" (UID: \"b25169cb-ffc8-4717-89cf-4049e6790b11\") " pod="service-telemetry/service-telemetry-framework-operators-2899d" Jan 21 18:14:11 crc kubenswrapper[4792]: I0121 18:14:11.273381 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-2899d" Jan 21 18:14:11 crc kubenswrapper[4792]: I0121 18:14:11.509895 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-2899d"] Jan 21 18:14:12 crc kubenswrapper[4792]: I0121 18:14:12.147611 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-2899d" event={"ID":"b25169cb-ffc8-4717-89cf-4049e6790b11","Type":"ContainerStarted","Data":"4abaf33ede563e2781777dd5fc4d9f4ab4544bdd6bfd444d1f52f39992ff613e"} Jan 21 18:14:13 crc kubenswrapper[4792]: I0121 18:14:13.129175 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "fb43b2a1-30ee-488c-b6c5-e43874f03605" (UID: "fb43b2a1-30ee-488c-b6c5-e43874f03605"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:14:13 crc kubenswrapper[4792]: I0121 18:14:13.141118 4792 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/fb43b2a1-30ee-488c-b6c5-e43874f03605-container-storage-root\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:15 crc kubenswrapper[4792]: I0121 18:14:15.544189 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-2899d"] Jan 21 18:14:16 crc kubenswrapper[4792]: I0121 18:14:16.334726 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-framework-operators-dgnfp"] Jan 21 18:14:16 crc kubenswrapper[4792]: I0121 18:14:16.339593 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-dgnfp" Jan 21 18:14:16 crc kubenswrapper[4792]: I0121 18:14:16.344022 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-dgnfp"] Jan 21 18:14:16 crc kubenswrapper[4792]: I0121 18:14:16.493311 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52cmh\" (UniqueName: \"kubernetes.io/projected/3118ce29-fb3c-4dcc-8dc6-29c9f97a7969-kube-api-access-52cmh\") pod \"service-telemetry-framework-operators-dgnfp\" (UID: \"3118ce29-fb3c-4dcc-8dc6-29c9f97a7969\") " pod="service-telemetry/service-telemetry-framework-operators-dgnfp" Jan 21 18:14:16 crc kubenswrapper[4792]: I0121 18:14:16.595008 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52cmh\" (UniqueName: \"kubernetes.io/projected/3118ce29-fb3c-4dcc-8dc6-29c9f97a7969-kube-api-access-52cmh\") pod \"service-telemetry-framework-operators-dgnfp\" (UID: \"3118ce29-fb3c-4dcc-8dc6-29c9f97a7969\") " pod="service-telemetry/service-telemetry-framework-operators-dgnfp" Jan 21 18:14:16 crc kubenswrapper[4792]: I0121 18:14:16.617477 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52cmh\" (UniqueName: \"kubernetes.io/projected/3118ce29-fb3c-4dcc-8dc6-29c9f97a7969-kube-api-access-52cmh\") pod \"service-telemetry-framework-operators-dgnfp\" (UID: \"3118ce29-fb3c-4dcc-8dc6-29c9f97a7969\") " pod="service-telemetry/service-telemetry-framework-operators-dgnfp" Jan 21 18:14:16 crc kubenswrapper[4792]: I0121 18:14:16.667776 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-dgnfp" Jan 21 18:14:27 crc kubenswrapper[4792]: E0121 18:14:27.426491 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest" Jan 21 18:14:27 crc kubenswrapper[4792]: E0121 18:14:27.427524 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:image-registry.openshift-image-registry.svc:5000/service-telemetry/service-telemetry-framework-index:latest,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hdbpj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe 
-addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod service-telemetry-framework-operators-2899d_service-telemetry(b25169cb-ffc8-4717-89cf-4049e6790b11): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 21 18:14:27 crc kubenswrapper[4792]: E0121 18:14:27.428680 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/service-telemetry-framework-operators-2899d" podUID="b25169cb-ffc8-4717-89cf-4049e6790b11" Jan 21 18:14:27 crc kubenswrapper[4792]: I0121 18:14:27.460699 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-dgnfp"] Jan 21 18:14:28 crc kubenswrapper[4792]: I0121 18:14:28.286953 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-dgnfp" event={"ID":"3118ce29-fb3c-4dcc-8dc6-29c9f97a7969","Type":"ContainerStarted","Data":"b1c48dc9136e55785e6cade39c9d68b8033354bb684bbc58d42bee36557fbbc0"} Jan 21 18:14:29 crc kubenswrapper[4792]: I0121 18:14:29.877169 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-2899d" Jan 21 18:14:29 crc kubenswrapper[4792]: I0121 18:14:29.975669 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hdbpj\" (UniqueName: \"kubernetes.io/projected/b25169cb-ffc8-4717-89cf-4049e6790b11-kube-api-access-hdbpj\") pod \"b25169cb-ffc8-4717-89cf-4049e6790b11\" (UID: \"b25169cb-ffc8-4717-89cf-4049e6790b11\") " Jan 21 18:14:29 crc kubenswrapper[4792]: I0121 18:14:29.981895 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b25169cb-ffc8-4717-89cf-4049e6790b11-kube-api-access-hdbpj" (OuterVolumeSpecName: "kube-api-access-hdbpj") pod "b25169cb-ffc8-4717-89cf-4049e6790b11" (UID: "b25169cb-ffc8-4717-89cf-4049e6790b11"). InnerVolumeSpecName "kube-api-access-hdbpj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:14:30 crc kubenswrapper[4792]: I0121 18:14:30.077454 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hdbpj\" (UniqueName: \"kubernetes.io/projected/b25169cb-ffc8-4717-89cf-4049e6790b11-kube-api-access-hdbpj\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:30 crc kubenswrapper[4792]: I0121 18:14:30.301721 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-2899d" event={"ID":"b25169cb-ffc8-4717-89cf-4049e6790b11","Type":"ContainerDied","Data":"4abaf33ede563e2781777dd5fc4d9f4ab4544bdd6bfd444d1f52f39992ff613e"} Jan 21 18:14:30 crc kubenswrapper[4792]: I0121 18:14:30.301786 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-2899d" Jan 21 18:14:30 crc kubenswrapper[4792]: I0121 18:14:30.303416 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-dgnfp" event={"ID":"3118ce29-fb3c-4dcc-8dc6-29c9f97a7969","Type":"ContainerStarted","Data":"b93150caff32070cf43362e878d8a7af3f69860f50df51ca41613312e174b8e6"} Jan 21 18:14:30 crc kubenswrapper[4792]: I0121 18:14:30.336199 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-framework-operators-dgnfp" podStartSLOduration=12.147897559 podStartE2EDuration="14.336177315s" podCreationTimestamp="2026-01-21 18:14:16 +0000 UTC" firstStartedPulling="2026-01-21 18:14:27.499233112 +0000 UTC m=+1101.481196298" lastFinishedPulling="2026-01-21 18:14:29.687512868 +0000 UTC m=+1103.669476054" observedRunningTime="2026-01-21 18:14:30.321528163 +0000 UTC m=+1104.303491349" watchObservedRunningTime="2026-01-21 18:14:30.336177315 +0000 UTC m=+1104.318140501" Jan 21 18:14:30 crc kubenswrapper[4792]: I0121 18:14:30.361161 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-2899d"] Jan 21 18:14:30 crc kubenswrapper[4792]: I0121 18:14:30.365743 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-2899d"] Jan 21 18:14:32 crc kubenswrapper[4792]: I0121 18:14:32.255577 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b25169cb-ffc8-4717-89cf-4049e6790b11" path="/var/lib/kubelet/pods/b25169cb-ffc8-4717-89cf-4049e6790b11/volumes" Jan 21 18:14:36 crc kubenswrapper[4792]: I0121 18:14:36.668769 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/service-telemetry-framework-operators-dgnfp" Jan 21 18:14:36 crc kubenswrapper[4792]: I0121 18:14:36.669476 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/service-telemetry-framework-operators-dgnfp" Jan 21 18:14:36 crc kubenswrapper[4792]: I0121 18:14:36.698222 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/service-telemetry-framework-operators-dgnfp" Jan 21 18:14:37 crc kubenswrapper[4792]: I0121 18:14:37.374760 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/service-telemetry-framework-operators-dgnfp" Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.013225 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj"] Jan 21 18:14:47 crc 
kubenswrapper[4792]: I0121 18:14:47.023121 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj" Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.025703 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj"] Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.148488 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4696\" (UniqueName: \"kubernetes.io/projected/84938c18-930b-4348-b2e8-e73ee66d0a08-kube-api-access-m4696\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj\" (UID: \"84938c18-930b-4348-b2e8-e73ee66d0a08\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj" Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.148570 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/84938c18-930b-4348-b2e8-e73ee66d0a08-bundle\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj\" (UID: \"84938c18-930b-4348-b2e8-e73ee66d0a08\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj" Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.148597 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/84938c18-930b-4348-b2e8-e73ee66d0a08-util\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj\" (UID: \"84938c18-930b-4348-b2e8-e73ee66d0a08\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj" Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.250650 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/84938c18-930b-4348-b2e8-e73ee66d0a08-bundle\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj\" (UID: \"84938c18-930b-4348-b2e8-e73ee66d0a08\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj" Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.250732 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/84938c18-930b-4348-b2e8-e73ee66d0a08-util\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj\" (UID: \"84938c18-930b-4348-b2e8-e73ee66d0a08\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj" Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.250874 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4696\" (UniqueName: \"kubernetes.io/projected/84938c18-930b-4348-b2e8-e73ee66d0a08-kube-api-access-m4696\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj\" (UID: \"84938c18-930b-4348-b2e8-e73ee66d0a08\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj" Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.251519 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/84938c18-930b-4348-b2e8-e73ee66d0a08-bundle\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj\" (UID: 
\"84938c18-930b-4348-b2e8-e73ee66d0a08\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj" Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.251607 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/84938c18-930b-4348-b2e8-e73ee66d0a08-util\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj\" (UID: \"84938c18-930b-4348-b2e8-e73ee66d0a08\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj" Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.281907 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4696\" (UniqueName: \"kubernetes.io/projected/84938c18-930b-4348-b2e8-e73ee66d0a08-kube-api-access-m4696\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj\" (UID: \"84938c18-930b-4348-b2e8-e73ee66d0a08\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj" Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.347363 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj" Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.739680 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx"] Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.742022 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx" Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.744930 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.756599 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx"] Jan 21 18:14:47 crc kubenswrapper[4792]: W0121 18:14:47.799262 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod84938c18_930b_4348_b2e8_e73ee66d0a08.slice/crio-4a27c280fb4ecb4cad91ead0b31d0a95b12b81f1dad5dd91cd8f78441723715a WatchSource:0}: Error finding container 4a27c280fb4ecb4cad91ead0b31d0a95b12b81f1dad5dd91cd8f78441723715a: Status 404 returned error can't find the container with id 4a27c280fb4ecb4cad91ead0b31d0a95b12b81f1dad5dd91cd8f78441723715a Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.804889 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj"] Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.859831 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2bc48447-ec9a-4a17-b701-687d94cac18d-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx\" (UID: \"2bc48447-ec9a-4a17-b701-687d94cac18d\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx" Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.859936 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/2bc48447-ec9a-4a17-b701-687d94cac18d-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx\" (UID: \"2bc48447-ec9a-4a17-b701-687d94cac18d\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx" Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.859967 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phlxq\" (UniqueName: \"kubernetes.io/projected/2bc48447-ec9a-4a17-b701-687d94cac18d-kube-api-access-phlxq\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx\" (UID: \"2bc48447-ec9a-4a17-b701-687d94cac18d\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx" Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.961246 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2bc48447-ec9a-4a17-b701-687d94cac18d-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx\" (UID: \"2bc48447-ec9a-4a17-b701-687d94cac18d\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx" Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.961314 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phlxq\" (UniqueName: \"kubernetes.io/projected/2bc48447-ec9a-4a17-b701-687d94cac18d-kube-api-access-phlxq\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx\" (UID: \"2bc48447-ec9a-4a17-b701-687d94cac18d\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx" Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.961377 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2bc48447-ec9a-4a17-b701-687d94cac18d-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx\" (UID: \"2bc48447-ec9a-4a17-b701-687d94cac18d\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx" Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.961882 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2bc48447-ec9a-4a17-b701-687d94cac18d-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx\" (UID: \"2bc48447-ec9a-4a17-b701-687d94cac18d\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx" Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.961890 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2bc48447-ec9a-4a17-b701-687d94cac18d-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx\" (UID: \"2bc48447-ec9a-4a17-b701-687d94cac18d\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx" Jan 21 18:14:47 crc kubenswrapper[4792]: I0121 18:14:47.984513 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phlxq\" (UniqueName: \"kubernetes.io/projected/2bc48447-ec9a-4a17-b701-687d94cac18d-kube-api-access-phlxq\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx\" (UID: \"2bc48447-ec9a-4a17-b701-687d94cac18d\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx" Jan 21 
18:14:48 crc kubenswrapper[4792]: I0121 18:14:48.063532 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx" Jan 21 18:14:48 crc kubenswrapper[4792]: I0121 18:14:48.424736 4792 generic.go:334] "Generic (PLEG): container finished" podID="84938c18-930b-4348-b2e8-e73ee66d0a08" containerID="f3761432e417756c26e4484dbc0a0c437e1215a9ec105058ce4b87f0cf3e74f5" exitCode=0 Jan 21 18:14:48 crc kubenswrapper[4792]: I0121 18:14:48.424798 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj" event={"ID":"84938c18-930b-4348-b2e8-e73ee66d0a08","Type":"ContainerDied","Data":"f3761432e417756c26e4484dbc0a0c437e1215a9ec105058ce4b87f0cf3e74f5"} Jan 21 18:14:48 crc kubenswrapper[4792]: I0121 18:14:48.425113 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj" event={"ID":"84938c18-930b-4348-b2e8-e73ee66d0a08","Type":"ContainerStarted","Data":"4a27c280fb4ecb4cad91ead0b31d0a95b12b81f1dad5dd91cd8f78441723715a"} Jan 21 18:14:48 crc kubenswrapper[4792]: I0121 18:14:48.487740 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx"] Jan 21 18:14:48 crc kubenswrapper[4792]: W0121 18:14:48.493376 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2bc48447_ec9a_4a17_b701_687d94cac18d.slice/crio-cf76481934f2a2a24338692c6a7dd4ec11ea13421bdf42dd19e82c6097689da9 WatchSource:0}: Error finding container cf76481934f2a2a24338692c6a7dd4ec11ea13421bdf42dd19e82c6097689da9: Status 404 returned error can't find the container with id cf76481934f2a2a24338692c6a7dd4ec11ea13421bdf42dd19e82c6097689da9 Jan 21 18:14:48 crc kubenswrapper[4792]: I0121 18:14:48.739253 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm"] Jan 21 18:14:48 crc kubenswrapper[4792]: I0121 18:14:48.741226 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm" Jan 21 18:14:48 crc kubenswrapper[4792]: I0121 18:14:48.751224 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm"] Jan 21 18:14:48 crc kubenswrapper[4792]: I0121 18:14:48.878115 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zl86n\" (UniqueName: \"kubernetes.io/projected/98302177-ddc2-4102-925e-00c964486bb2-kube-api-access-zl86n\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm\" (UID: \"98302177-ddc2-4102-925e-00c964486bb2\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm" Jan 21 18:14:48 crc kubenswrapper[4792]: I0121 18:14:48.878200 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/98302177-ddc2-4102-925e-00c964486bb2-bundle\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm\" (UID: \"98302177-ddc2-4102-925e-00c964486bb2\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm" Jan 21 18:14:48 crc kubenswrapper[4792]: I0121 18:14:48.878231 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/98302177-ddc2-4102-925e-00c964486bb2-util\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm\" (UID: \"98302177-ddc2-4102-925e-00c964486bb2\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm" Jan 21 18:14:48 crc kubenswrapper[4792]: I0121 18:14:48.981200 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zl86n\" (UniqueName: \"kubernetes.io/projected/98302177-ddc2-4102-925e-00c964486bb2-kube-api-access-zl86n\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm\" (UID: \"98302177-ddc2-4102-925e-00c964486bb2\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm" Jan 21 18:14:48 crc kubenswrapper[4792]: I0121 18:14:48.981264 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/98302177-ddc2-4102-925e-00c964486bb2-bundle\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm\" (UID: \"98302177-ddc2-4102-925e-00c964486bb2\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm" Jan 21 18:14:48 crc kubenswrapper[4792]: I0121 18:14:48.981298 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/98302177-ddc2-4102-925e-00c964486bb2-util\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm\" (UID: \"98302177-ddc2-4102-925e-00c964486bb2\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm" Jan 21 18:14:48 crc kubenswrapper[4792]: I0121 18:14:48.981946 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/98302177-ddc2-4102-925e-00c964486bb2-util\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm\" (UID: \"98302177-ddc2-4102-925e-00c964486bb2\") " 
pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm" Jan 21 18:14:48 crc kubenswrapper[4792]: I0121 18:14:48.982133 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/98302177-ddc2-4102-925e-00c964486bb2-bundle\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm\" (UID: \"98302177-ddc2-4102-925e-00c964486bb2\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm" Jan 21 18:14:49 crc kubenswrapper[4792]: I0121 18:14:49.006028 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zl86n\" (UniqueName: \"kubernetes.io/projected/98302177-ddc2-4102-925e-00c964486bb2-kube-api-access-zl86n\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm\" (UID: \"98302177-ddc2-4102-925e-00c964486bb2\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm" Jan 21 18:14:49 crc kubenswrapper[4792]: I0121 18:14:49.098166 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm" Jan 21 18:14:49 crc kubenswrapper[4792]: I0121 18:14:49.432799 4792 generic.go:334] "Generic (PLEG): container finished" podID="2bc48447-ec9a-4a17-b701-687d94cac18d" containerID="337522fc9d45a79723932ff49579001219c8045d73c8b1245d34a63fdd2294d0" exitCode=0 Jan 21 18:14:49 crc kubenswrapper[4792]: I0121 18:14:49.432935 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx" event={"ID":"2bc48447-ec9a-4a17-b701-687d94cac18d","Type":"ContainerDied","Data":"337522fc9d45a79723932ff49579001219c8045d73c8b1245d34a63fdd2294d0"} Jan 21 18:14:49 crc kubenswrapper[4792]: I0121 18:14:49.433523 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx" event={"ID":"2bc48447-ec9a-4a17-b701-687d94cac18d","Type":"ContainerStarted","Data":"cf76481934f2a2a24338692c6a7dd4ec11ea13421bdf42dd19e82c6097689da9"} Jan 21 18:14:49 crc kubenswrapper[4792]: I0121 18:14:49.438561 4792 generic.go:334] "Generic (PLEG): container finished" podID="84938c18-930b-4348-b2e8-e73ee66d0a08" containerID="b7aa471ba699ee00438bc4fd3634084f7fd32b7761543e4267fb84ce1ef81932" exitCode=0 Jan 21 18:14:49 crc kubenswrapper[4792]: I0121 18:14:49.438618 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj" event={"ID":"84938c18-930b-4348-b2e8-e73ee66d0a08","Type":"ContainerDied","Data":"b7aa471ba699ee00438bc4fd3634084f7fd32b7761543e4267fb84ce1ef81932"} Jan 21 18:14:49 crc kubenswrapper[4792]: I0121 18:14:49.517567 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm"] Jan 21 18:14:49 crc kubenswrapper[4792]: W0121 18:14:49.528351 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98302177_ddc2_4102_925e_00c964486bb2.slice/crio-844ec19aa6329d2d74fd9539d3d21a5129675f76f1dadc6ee466309eddee763e WatchSource:0}: Error finding container 844ec19aa6329d2d74fd9539d3d21a5129675f76f1dadc6ee466309eddee763e: Status 404 returned error can't find the container with id 
844ec19aa6329d2d74fd9539d3d21a5129675f76f1dadc6ee466309eddee763e Jan 21 18:14:50 crc kubenswrapper[4792]: I0121 18:14:50.451039 4792 generic.go:334] "Generic (PLEG): container finished" podID="84938c18-930b-4348-b2e8-e73ee66d0a08" containerID="5295a3f3335d04ff3781d540c7efcdd5ca7bfd642ad0de0b4fc7858c4e37b6b2" exitCode=0 Jan 21 18:14:50 crc kubenswrapper[4792]: I0121 18:14:50.451160 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj" event={"ID":"84938c18-930b-4348-b2e8-e73ee66d0a08","Type":"ContainerDied","Data":"5295a3f3335d04ff3781d540c7efcdd5ca7bfd642ad0de0b4fc7858c4e37b6b2"} Jan 21 18:14:50 crc kubenswrapper[4792]: I0121 18:14:50.454558 4792 generic.go:334] "Generic (PLEG): container finished" podID="98302177-ddc2-4102-925e-00c964486bb2" containerID="43876632df0fb48ec193905d1ee31656f101c259138d1690d917834698ea7dc1" exitCode=0 Jan 21 18:14:50 crc kubenswrapper[4792]: I0121 18:14:50.454624 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm" event={"ID":"98302177-ddc2-4102-925e-00c964486bb2","Type":"ContainerDied","Data":"43876632df0fb48ec193905d1ee31656f101c259138d1690d917834698ea7dc1"} Jan 21 18:14:50 crc kubenswrapper[4792]: I0121 18:14:50.454658 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm" event={"ID":"98302177-ddc2-4102-925e-00c964486bb2","Type":"ContainerStarted","Data":"844ec19aa6329d2d74fd9539d3d21a5129675f76f1dadc6ee466309eddee763e"} Jan 21 18:14:51 crc kubenswrapper[4792]: I0121 18:14:51.798799 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj" Jan 21 18:14:51 crc kubenswrapper[4792]: I0121 18:14:51.961472 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/84938c18-930b-4348-b2e8-e73ee66d0a08-util\") pod \"84938c18-930b-4348-b2e8-e73ee66d0a08\" (UID: \"84938c18-930b-4348-b2e8-e73ee66d0a08\") " Jan 21 18:14:51 crc kubenswrapper[4792]: I0121 18:14:51.961615 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/84938c18-930b-4348-b2e8-e73ee66d0a08-bundle\") pod \"84938c18-930b-4348-b2e8-e73ee66d0a08\" (UID: \"84938c18-930b-4348-b2e8-e73ee66d0a08\") " Jan 21 18:14:51 crc kubenswrapper[4792]: I0121 18:14:51.961680 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m4696\" (UniqueName: \"kubernetes.io/projected/84938c18-930b-4348-b2e8-e73ee66d0a08-kube-api-access-m4696\") pod \"84938c18-930b-4348-b2e8-e73ee66d0a08\" (UID: \"84938c18-930b-4348-b2e8-e73ee66d0a08\") " Jan 21 18:14:51 crc kubenswrapper[4792]: I0121 18:14:51.963909 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84938c18-930b-4348-b2e8-e73ee66d0a08-bundle" (OuterVolumeSpecName: "bundle") pod "84938c18-930b-4348-b2e8-e73ee66d0a08" (UID: "84938c18-930b-4348-b2e8-e73ee66d0a08"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:14:51 crc kubenswrapper[4792]: I0121 18:14:51.972181 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84938c18-930b-4348-b2e8-e73ee66d0a08-kube-api-access-m4696" (OuterVolumeSpecName: "kube-api-access-m4696") pod "84938c18-930b-4348-b2e8-e73ee66d0a08" (UID: "84938c18-930b-4348-b2e8-e73ee66d0a08"). InnerVolumeSpecName "kube-api-access-m4696". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:14:51 crc kubenswrapper[4792]: I0121 18:14:51.978518 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84938c18-930b-4348-b2e8-e73ee66d0a08-util" (OuterVolumeSpecName: "util") pod "84938c18-930b-4348-b2e8-e73ee66d0a08" (UID: "84938c18-930b-4348-b2e8-e73ee66d0a08"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:14:52 crc kubenswrapper[4792]: I0121 18:14:52.063237 4792 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/84938c18-930b-4348-b2e8-e73ee66d0a08-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:52 crc kubenswrapper[4792]: I0121 18:14:52.063278 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m4696\" (UniqueName: \"kubernetes.io/projected/84938c18-930b-4348-b2e8-e73ee66d0a08-kube-api-access-m4696\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:52 crc kubenswrapper[4792]: I0121 18:14:52.063291 4792 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/84938c18-930b-4348-b2e8-e73ee66d0a08-util\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:52 crc kubenswrapper[4792]: I0121 18:14:52.473515 4792 generic.go:334] "Generic (PLEG): container finished" podID="2bc48447-ec9a-4a17-b701-687d94cac18d" containerID="2ffea37a4a0ed64a65c5d7c8bb9247a8aa8fb98b1be2bdb47b934db87aacabe4" exitCode=0 Jan 21 18:14:52 crc kubenswrapper[4792]: I0121 18:14:52.473580 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx" event={"ID":"2bc48447-ec9a-4a17-b701-687d94cac18d","Type":"ContainerDied","Data":"2ffea37a4a0ed64a65c5d7c8bb9247a8aa8fb98b1be2bdb47b934db87aacabe4"} Jan 21 18:14:52 crc kubenswrapper[4792]: I0121 18:14:52.479717 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj" event={"ID":"84938c18-930b-4348-b2e8-e73ee66d0a08","Type":"ContainerDied","Data":"4a27c280fb4ecb4cad91ead0b31d0a95b12b81f1dad5dd91cd8f78441723715a"} Jan 21 18:14:52 crc kubenswrapper[4792]: I0121 18:14:52.479766 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a27c280fb4ecb4cad91ead0b31d0a95b12b81f1dad5dd91cd8f78441723715a" Jan 21 18:14:52 crc kubenswrapper[4792]: I0121 18:14:52.479779 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj" Jan 21 18:14:52 crc kubenswrapper[4792]: I0121 18:14:52.483887 4792 generic.go:334] "Generic (PLEG): container finished" podID="98302177-ddc2-4102-925e-00c964486bb2" containerID="d6e4812e0e2f5567b8e9a294b05b7741d6b8f47647c6cd34814f88d88196ef09" exitCode=0 Jan 21 18:14:52 crc kubenswrapper[4792]: I0121 18:14:52.483968 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm" event={"ID":"98302177-ddc2-4102-925e-00c964486bb2","Type":"ContainerDied","Data":"d6e4812e0e2f5567b8e9a294b05b7741d6b8f47647c6cd34814f88d88196ef09"} Jan 21 18:14:53 crc kubenswrapper[4792]: I0121 18:14:53.493221 4792 generic.go:334] "Generic (PLEG): container finished" podID="98302177-ddc2-4102-925e-00c964486bb2" containerID="e44c0d678bb6e6bfbdf081bf5a2d75f818b8c7a6a1a4108e0acecc541b45e5b2" exitCode=0 Jan 21 18:14:53 crc kubenswrapper[4792]: I0121 18:14:53.493314 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm" event={"ID":"98302177-ddc2-4102-925e-00c964486bb2","Type":"ContainerDied","Data":"e44c0d678bb6e6bfbdf081bf5a2d75f818b8c7a6a1a4108e0acecc541b45e5b2"} Jan 21 18:14:53 crc kubenswrapper[4792]: I0121 18:14:53.498823 4792 generic.go:334] "Generic (PLEG): container finished" podID="2bc48447-ec9a-4a17-b701-687d94cac18d" containerID="8ecac2d969eaf6b4ef5e841bf7d1d5fafe3d85692eddc0a0c762f6deeb882076" exitCode=0 Jan 21 18:14:53 crc kubenswrapper[4792]: I0121 18:14:53.498913 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx" event={"ID":"2bc48447-ec9a-4a17-b701-687d94cac18d","Type":"ContainerDied","Data":"8ecac2d969eaf6b4ef5e841bf7d1d5fafe3d85692eddc0a0c762f6deeb882076"} Jan 21 18:14:54 crc kubenswrapper[4792]: I0121 18:14:54.799615 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm" Jan 21 18:14:54 crc kubenswrapper[4792]: I0121 18:14:54.818302 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx" Jan 21 18:14:54 crc kubenswrapper[4792]: I0121 18:14:54.903696 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zl86n\" (UniqueName: \"kubernetes.io/projected/98302177-ddc2-4102-925e-00c964486bb2-kube-api-access-zl86n\") pod \"98302177-ddc2-4102-925e-00c964486bb2\" (UID: \"98302177-ddc2-4102-925e-00c964486bb2\") " Jan 21 18:14:54 crc kubenswrapper[4792]: I0121 18:14:54.903825 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2bc48447-ec9a-4a17-b701-687d94cac18d-util\") pod \"2bc48447-ec9a-4a17-b701-687d94cac18d\" (UID: \"2bc48447-ec9a-4a17-b701-687d94cac18d\") " Jan 21 18:14:54 crc kubenswrapper[4792]: I0121 18:14:54.903879 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/98302177-ddc2-4102-925e-00c964486bb2-util\") pod \"98302177-ddc2-4102-925e-00c964486bb2\" (UID: \"98302177-ddc2-4102-925e-00c964486bb2\") " Jan 21 18:14:54 crc kubenswrapper[4792]: I0121 18:14:54.903968 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2bc48447-ec9a-4a17-b701-687d94cac18d-bundle\") pod \"2bc48447-ec9a-4a17-b701-687d94cac18d\" (UID: \"2bc48447-ec9a-4a17-b701-687d94cac18d\") " Jan 21 18:14:54 crc kubenswrapper[4792]: I0121 18:14:54.904027 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/98302177-ddc2-4102-925e-00c964486bb2-bundle\") pod \"98302177-ddc2-4102-925e-00c964486bb2\" (UID: \"98302177-ddc2-4102-925e-00c964486bb2\") " Jan 21 18:14:54 crc kubenswrapper[4792]: I0121 18:14:54.904098 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-phlxq\" (UniqueName: \"kubernetes.io/projected/2bc48447-ec9a-4a17-b701-687d94cac18d-kube-api-access-phlxq\") pod \"2bc48447-ec9a-4a17-b701-687d94cac18d\" (UID: \"2bc48447-ec9a-4a17-b701-687d94cac18d\") " Jan 21 18:14:54 crc kubenswrapper[4792]: I0121 18:14:54.904787 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2bc48447-ec9a-4a17-b701-687d94cac18d-bundle" (OuterVolumeSpecName: "bundle") pod "2bc48447-ec9a-4a17-b701-687d94cac18d" (UID: "2bc48447-ec9a-4a17-b701-687d94cac18d"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:14:54 crc kubenswrapper[4792]: I0121 18:14:54.905162 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98302177-ddc2-4102-925e-00c964486bb2-bundle" (OuterVolumeSpecName: "bundle") pod "98302177-ddc2-4102-925e-00c964486bb2" (UID: "98302177-ddc2-4102-925e-00c964486bb2"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:14:54 crc kubenswrapper[4792]: I0121 18:14:54.912226 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bc48447-ec9a-4a17-b701-687d94cac18d-kube-api-access-phlxq" (OuterVolumeSpecName: "kube-api-access-phlxq") pod "2bc48447-ec9a-4a17-b701-687d94cac18d" (UID: "2bc48447-ec9a-4a17-b701-687d94cac18d"). InnerVolumeSpecName "kube-api-access-phlxq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:14:54 crc kubenswrapper[4792]: I0121 18:14:54.912298 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98302177-ddc2-4102-925e-00c964486bb2-kube-api-access-zl86n" (OuterVolumeSpecName: "kube-api-access-zl86n") pod "98302177-ddc2-4102-925e-00c964486bb2" (UID: "98302177-ddc2-4102-925e-00c964486bb2"). InnerVolumeSpecName "kube-api-access-zl86n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:14:54 crc kubenswrapper[4792]: I0121 18:14:54.917162 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2bc48447-ec9a-4a17-b701-687d94cac18d-util" (OuterVolumeSpecName: "util") pod "2bc48447-ec9a-4a17-b701-687d94cac18d" (UID: "2bc48447-ec9a-4a17-b701-687d94cac18d"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:14:55 crc kubenswrapper[4792]: I0121 18:14:55.005525 4792 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2bc48447-ec9a-4a17-b701-687d94cac18d-util\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:55 crc kubenswrapper[4792]: I0121 18:14:55.006058 4792 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2bc48447-ec9a-4a17-b701-687d94cac18d-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:55 crc kubenswrapper[4792]: I0121 18:14:55.006091 4792 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/98302177-ddc2-4102-925e-00c964486bb2-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:55 crc kubenswrapper[4792]: I0121 18:14:55.006103 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-phlxq\" (UniqueName: \"kubernetes.io/projected/2bc48447-ec9a-4a17-b701-687d94cac18d-kube-api-access-phlxq\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:55 crc kubenswrapper[4792]: I0121 18:14:55.006118 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zl86n\" (UniqueName: \"kubernetes.io/projected/98302177-ddc2-4102-925e-00c964486bb2-kube-api-access-zl86n\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:55 crc kubenswrapper[4792]: I0121 18:14:55.006797 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98302177-ddc2-4102-925e-00c964486bb2-util" (OuterVolumeSpecName: "util") pod "98302177-ddc2-4102-925e-00c964486bb2" (UID: "98302177-ddc2-4102-925e-00c964486bb2"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:14:55 crc kubenswrapper[4792]: I0121 18:14:55.107384 4792 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/98302177-ddc2-4102-925e-00c964486bb2-util\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:55 crc kubenswrapper[4792]: I0121 18:14:55.514316 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx" event={"ID":"2bc48447-ec9a-4a17-b701-687d94cac18d","Type":"ContainerDied","Data":"cf76481934f2a2a24338692c6a7dd4ec11ea13421bdf42dd19e82c6097689da9"} Jan 21 18:14:55 crc kubenswrapper[4792]: I0121 18:14:55.514363 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx" Jan 21 18:14:55 crc kubenswrapper[4792]: I0121 18:14:55.514400 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cf76481934f2a2a24338692c6a7dd4ec11ea13421bdf42dd19e82c6097689da9" Jan 21 18:14:55 crc kubenswrapper[4792]: I0121 18:14:55.520473 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm" event={"ID":"98302177-ddc2-4102-925e-00c964486bb2","Type":"ContainerDied","Data":"844ec19aa6329d2d74fd9539d3d21a5129675f76f1dadc6ee466309eddee763e"} Jan 21 18:14:55 crc kubenswrapper[4792]: I0121 18:14:55.520542 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="844ec19aa6329d2d74fd9539d3d21a5129675f76f1dadc6ee466309eddee763e" Jan 21 18:14:55 crc kubenswrapper[4792]: I0121 18:14:55.520567 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm" Jan 21 18:14:59 crc kubenswrapper[4792]: I0121 18:14:59.675457 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-qtp67"] Jan 21 18:14:59 crc kubenswrapper[4792]: E0121 18:14:59.676269 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98302177-ddc2-4102-925e-00c964486bb2" containerName="extract" Jan 21 18:14:59 crc kubenswrapper[4792]: I0121 18:14:59.676285 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="98302177-ddc2-4102-925e-00c964486bb2" containerName="extract" Jan 21 18:14:59 crc kubenswrapper[4792]: E0121 18:14:59.676295 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bc48447-ec9a-4a17-b701-687d94cac18d" containerName="pull" Jan 21 18:14:59 crc kubenswrapper[4792]: I0121 18:14:59.676302 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bc48447-ec9a-4a17-b701-687d94cac18d" containerName="pull" Jan 21 18:14:59 crc kubenswrapper[4792]: E0121 18:14:59.676310 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bc48447-ec9a-4a17-b701-687d94cac18d" containerName="extract" Jan 21 18:14:59 crc kubenswrapper[4792]: I0121 18:14:59.676318 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bc48447-ec9a-4a17-b701-687d94cac18d" containerName="extract" Jan 21 18:14:59 crc kubenswrapper[4792]: E0121 18:14:59.676325 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98302177-ddc2-4102-925e-00c964486bb2" containerName="pull" Jan 21 18:14:59 crc kubenswrapper[4792]: I0121 18:14:59.676330 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="98302177-ddc2-4102-925e-00c964486bb2" containerName="pull" Jan 21 18:14:59 crc kubenswrapper[4792]: E0121 18:14:59.676344 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bc48447-ec9a-4a17-b701-687d94cac18d" containerName="util" Jan 21 18:14:59 crc kubenswrapper[4792]: I0121 18:14:59.676349 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bc48447-ec9a-4a17-b701-687d94cac18d" containerName="util" Jan 21 18:14:59 crc kubenswrapper[4792]: E0121 18:14:59.676360 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84938c18-930b-4348-b2e8-e73ee66d0a08" containerName="pull" Jan 21 18:14:59 crc kubenswrapper[4792]: I0121 18:14:59.676366 4792 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="84938c18-930b-4348-b2e8-e73ee66d0a08" containerName="pull" Jan 21 18:14:59 crc kubenswrapper[4792]: E0121 18:14:59.676377 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84938c18-930b-4348-b2e8-e73ee66d0a08" containerName="extract" Jan 21 18:14:59 crc kubenswrapper[4792]: I0121 18:14:59.676383 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="84938c18-930b-4348-b2e8-e73ee66d0a08" containerName="extract" Jan 21 18:14:59 crc kubenswrapper[4792]: E0121 18:14:59.676394 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98302177-ddc2-4102-925e-00c964486bb2" containerName="util" Jan 21 18:14:59 crc kubenswrapper[4792]: I0121 18:14:59.676400 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="98302177-ddc2-4102-925e-00c964486bb2" containerName="util" Jan 21 18:14:59 crc kubenswrapper[4792]: E0121 18:14:59.676409 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84938c18-930b-4348-b2e8-e73ee66d0a08" containerName="util" Jan 21 18:14:59 crc kubenswrapper[4792]: I0121 18:14:59.676414 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="84938c18-930b-4348-b2e8-e73ee66d0a08" containerName="util" Jan 21 18:14:59 crc kubenswrapper[4792]: I0121 18:14:59.676527 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="84938c18-930b-4348-b2e8-e73ee66d0a08" containerName="extract" Jan 21 18:14:59 crc kubenswrapper[4792]: I0121 18:14:59.676550 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="98302177-ddc2-4102-925e-00c964486bb2" containerName="extract" Jan 21 18:14:59 crc kubenswrapper[4792]: I0121 18:14:59.676560 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bc48447-ec9a-4a17-b701-687d94cac18d" containerName="extract" Jan 21 18:14:59 crc kubenswrapper[4792]: I0121 18:14:59.677178 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/interconnect-operator-5bb49f789d-qtp67" Jan 21 18:14:59 crc kubenswrapper[4792]: I0121 18:14:59.679624 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"interconnect-operator-dockercfg-hxsgt" Jan 21 18:14:59 crc kubenswrapper[4792]: I0121 18:14:59.733277 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-qtp67"] Jan 21 18:14:59 crc kubenswrapper[4792]: I0121 18:14:59.778823 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlgn7\" (UniqueName: \"kubernetes.io/projected/832ffd66-8482-4991-b666-ebc765fc5f8a-kube-api-access-rlgn7\") pod \"interconnect-operator-5bb49f789d-qtp67\" (UID: \"832ffd66-8482-4991-b666-ebc765fc5f8a\") " pod="service-telemetry/interconnect-operator-5bb49f789d-qtp67" Jan 21 18:14:59 crc kubenswrapper[4792]: I0121 18:14:59.880749 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlgn7\" (UniqueName: \"kubernetes.io/projected/832ffd66-8482-4991-b666-ebc765fc5f8a-kube-api-access-rlgn7\") pod \"interconnect-operator-5bb49f789d-qtp67\" (UID: \"832ffd66-8482-4991-b666-ebc765fc5f8a\") " pod="service-telemetry/interconnect-operator-5bb49f789d-qtp67" Jan 21 18:14:59 crc kubenswrapper[4792]: I0121 18:14:59.900886 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlgn7\" (UniqueName: \"kubernetes.io/projected/832ffd66-8482-4991-b666-ebc765fc5f8a-kube-api-access-rlgn7\") pod \"interconnect-operator-5bb49f789d-qtp67\" (UID: \"832ffd66-8482-4991-b666-ebc765fc5f8a\") " pod="service-telemetry/interconnect-operator-5bb49f789d-qtp67" Jan 21 18:14:59 crc kubenswrapper[4792]: I0121 18:14:59.997301 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/interconnect-operator-5bb49f789d-qtp67" Jan 21 18:15:00 crc kubenswrapper[4792]: I0121 18:15:00.209975 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483655-pgpdz"] Jan 21 18:15:00 crc kubenswrapper[4792]: I0121 18:15:00.211741 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-pgpdz" Jan 21 18:15:00 crc kubenswrapper[4792]: I0121 18:15:00.216526 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 21 18:15:00 crc kubenswrapper[4792]: I0121 18:15:00.217190 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 21 18:15:00 crc kubenswrapper[4792]: I0121 18:15:00.285851 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483655-pgpdz"] Jan 21 18:15:00 crc kubenswrapper[4792]: I0121 18:15:00.291805 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/54e9171e-bf7c-49e7-8fda-1063833badf9-secret-volume\") pod \"collect-profiles-29483655-pgpdz\" (UID: \"54e9171e-bf7c-49e7-8fda-1063833badf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-pgpdz" Jan 21 18:15:00 crc kubenswrapper[4792]: I0121 18:15:00.291907 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dp2k8\" (UniqueName: \"kubernetes.io/projected/54e9171e-bf7c-49e7-8fda-1063833badf9-kube-api-access-dp2k8\") pod \"collect-profiles-29483655-pgpdz\" (UID: \"54e9171e-bf7c-49e7-8fda-1063833badf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-pgpdz" Jan 21 18:15:00 crc kubenswrapper[4792]: I0121 18:15:00.291994 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/54e9171e-bf7c-49e7-8fda-1063833badf9-config-volume\") pod \"collect-profiles-29483655-pgpdz\" (UID: \"54e9171e-bf7c-49e7-8fda-1063833badf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-pgpdz" Jan 21 18:15:00 crc kubenswrapper[4792]: I0121 18:15:00.393619 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/54e9171e-bf7c-49e7-8fda-1063833badf9-config-volume\") pod \"collect-profiles-29483655-pgpdz\" (UID: \"54e9171e-bf7c-49e7-8fda-1063833badf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-pgpdz" Jan 21 18:15:00 crc kubenswrapper[4792]: I0121 18:15:00.393758 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/54e9171e-bf7c-49e7-8fda-1063833badf9-secret-volume\") pod \"collect-profiles-29483655-pgpdz\" (UID: \"54e9171e-bf7c-49e7-8fda-1063833badf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-pgpdz" Jan 21 18:15:00 crc kubenswrapper[4792]: I0121 18:15:00.393779 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dp2k8\" (UniqueName: \"kubernetes.io/projected/54e9171e-bf7c-49e7-8fda-1063833badf9-kube-api-access-dp2k8\") pod \"collect-profiles-29483655-pgpdz\" (UID: \"54e9171e-bf7c-49e7-8fda-1063833badf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-pgpdz" Jan 21 18:15:00 crc kubenswrapper[4792]: I0121 18:15:00.397187 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/54e9171e-bf7c-49e7-8fda-1063833badf9-config-volume\") pod 
\"collect-profiles-29483655-pgpdz\" (UID: \"54e9171e-bf7c-49e7-8fda-1063833badf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-pgpdz" Jan 21 18:15:00 crc kubenswrapper[4792]: I0121 18:15:00.403483 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/54e9171e-bf7c-49e7-8fda-1063833badf9-secret-volume\") pod \"collect-profiles-29483655-pgpdz\" (UID: \"54e9171e-bf7c-49e7-8fda-1063833badf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-pgpdz" Jan 21 18:15:00 crc kubenswrapper[4792]: I0121 18:15:00.407980 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-qtp67"] Jan 21 18:15:00 crc kubenswrapper[4792]: I0121 18:15:00.441089 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dp2k8\" (UniqueName: \"kubernetes.io/projected/54e9171e-bf7c-49e7-8fda-1063833badf9-kube-api-access-dp2k8\") pod \"collect-profiles-29483655-pgpdz\" (UID: \"54e9171e-bf7c-49e7-8fda-1063833badf9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-pgpdz" Jan 21 18:15:00 crc kubenswrapper[4792]: I0121 18:15:00.555810 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/interconnect-operator-5bb49f789d-qtp67" event={"ID":"832ffd66-8482-4991-b666-ebc765fc5f8a","Type":"ContainerStarted","Data":"2b8564a1d913e2004813b2f12871266de561e8023e1f75473d53c2ace87b8651"} Jan 21 18:15:00 crc kubenswrapper[4792]: I0121 18:15:00.563502 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-pgpdz" Jan 21 18:15:00 crc kubenswrapper[4792]: I0121 18:15:00.841638 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483655-pgpdz"] Jan 21 18:15:00 crc kubenswrapper[4792]: W0121 18:15:00.851256 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod54e9171e_bf7c_49e7_8fda_1063833badf9.slice/crio-2f604d7d4692ae4ed6c6d5e52c637e98c4eed3a45dcc75a636b590569851de25 WatchSource:0}: Error finding container 2f604d7d4692ae4ed6c6d5e52c637e98c4eed3a45dcc75a636b590569851de25: Status 404 returned error can't find the container with id 2f604d7d4692ae4ed6c6d5e52c637e98c4eed3a45dcc75a636b590569851de25 Jan 21 18:15:01 crc kubenswrapper[4792]: I0121 18:15:01.566327 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-pgpdz" event={"ID":"54e9171e-bf7c-49e7-8fda-1063833badf9","Type":"ContainerStarted","Data":"2f604d7d4692ae4ed6c6d5e52c637e98c4eed3a45dcc75a636b590569851de25"} Jan 21 18:15:02 crc kubenswrapper[4792]: I0121 18:15:02.442976 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-55b89ddfb9-5bwxr"] Jan 21 18:15:02 crc kubenswrapper[4792]: I0121 18:15:02.444039 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-55b89ddfb9-5bwxr" Jan 21 18:15:02 crc kubenswrapper[4792]: I0121 18:15:02.451311 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"service-telemetry-operator-dockercfg-mcqvd" Jan 21 18:15:02 crc kubenswrapper[4792]: I0121 18:15:02.470748 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-55b89ddfb9-5bwxr"] Jan 21 18:15:02 crc kubenswrapper[4792]: I0121 18:15:02.528078 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpgkp\" (UniqueName: \"kubernetes.io/projected/3132bdf7-accb-4e05-833d-7765614688ea-kube-api-access-lpgkp\") pod \"service-telemetry-operator-55b89ddfb9-5bwxr\" (UID: \"3132bdf7-accb-4e05-833d-7765614688ea\") " pod="service-telemetry/service-telemetry-operator-55b89ddfb9-5bwxr" Jan 21 18:15:02 crc kubenswrapper[4792]: I0121 18:15:02.528396 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/3132bdf7-accb-4e05-833d-7765614688ea-runner\") pod \"service-telemetry-operator-55b89ddfb9-5bwxr\" (UID: \"3132bdf7-accb-4e05-833d-7765614688ea\") " pod="service-telemetry/service-telemetry-operator-55b89ddfb9-5bwxr" Jan 21 18:15:02 crc kubenswrapper[4792]: I0121 18:15:02.629714 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpgkp\" (UniqueName: \"kubernetes.io/projected/3132bdf7-accb-4e05-833d-7765614688ea-kube-api-access-lpgkp\") pod \"service-telemetry-operator-55b89ddfb9-5bwxr\" (UID: \"3132bdf7-accb-4e05-833d-7765614688ea\") " pod="service-telemetry/service-telemetry-operator-55b89ddfb9-5bwxr" Jan 21 18:15:02 crc kubenswrapper[4792]: I0121 18:15:02.629853 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/3132bdf7-accb-4e05-833d-7765614688ea-runner\") pod \"service-telemetry-operator-55b89ddfb9-5bwxr\" (UID: \"3132bdf7-accb-4e05-833d-7765614688ea\") " pod="service-telemetry/service-telemetry-operator-55b89ddfb9-5bwxr" Jan 21 18:15:02 crc kubenswrapper[4792]: I0121 18:15:02.630427 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/3132bdf7-accb-4e05-833d-7765614688ea-runner\") pod \"service-telemetry-operator-55b89ddfb9-5bwxr\" (UID: \"3132bdf7-accb-4e05-833d-7765614688ea\") " pod="service-telemetry/service-telemetry-operator-55b89ddfb9-5bwxr" Jan 21 18:15:02 crc kubenswrapper[4792]: I0121 18:15:02.665751 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpgkp\" (UniqueName: \"kubernetes.io/projected/3132bdf7-accb-4e05-833d-7765614688ea-kube-api-access-lpgkp\") pod \"service-telemetry-operator-55b89ddfb9-5bwxr\" (UID: \"3132bdf7-accb-4e05-833d-7765614688ea\") " pod="service-telemetry/service-telemetry-operator-55b89ddfb9-5bwxr" Jan 21 18:15:02 crc kubenswrapper[4792]: I0121 18:15:02.762031 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-55b89ddfb9-5bwxr" Jan 21 18:15:03 crc kubenswrapper[4792]: I0121 18:15:03.214477 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-55b89ddfb9-5bwxr"] Jan 21 18:15:03 crc kubenswrapper[4792]: W0121 18:15:03.222040 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3132bdf7_accb_4e05_833d_7765614688ea.slice/crio-aea8bfb06adeb408d90ae81d4cc8a225ffd6334ea2f62441a3f16dde34daccc2 WatchSource:0}: Error finding container aea8bfb06adeb408d90ae81d4cc8a225ffd6334ea2f62441a3f16dde34daccc2: Status 404 returned error can't find the container with id aea8bfb06adeb408d90ae81d4cc8a225ffd6334ea2f62441a3f16dde34daccc2 Jan 21 18:15:03 crc kubenswrapper[4792]: I0121 18:15:03.593401 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-55b89ddfb9-5bwxr" event={"ID":"3132bdf7-accb-4e05-833d-7765614688ea","Type":"ContainerStarted","Data":"aea8bfb06adeb408d90ae81d4cc8a225ffd6334ea2f62441a3f16dde34daccc2"} Jan 21 18:15:03 crc kubenswrapper[4792]: I0121 18:15:03.598672 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-pgpdz" event={"ID":"54e9171e-bf7c-49e7-8fda-1063833badf9","Type":"ContainerStarted","Data":"1ebbf21a82c1d6bb6b63d6324077c5be3847620544afe000ece52ae8bcbb4717"} Jan 21 18:15:03 crc kubenswrapper[4792]: I0121 18:15:03.626570 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-pgpdz" podStartSLOduration=3.62653594 podStartE2EDuration="3.62653594s" podCreationTimestamp="2026-01-21 18:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:15:03.619791572 +0000 UTC m=+1137.601754758" watchObservedRunningTime="2026-01-21 18:15:03.62653594 +0000 UTC m=+1137.608499146" Jan 21 18:15:04 crc kubenswrapper[4792]: I0121 18:15:04.282815 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/smart-gateway-operator-bbbc889bc-ftf54"] Jan 21 18:15:04 crc kubenswrapper[4792]: I0121 18:15:04.284156 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-bbbc889bc-ftf54" Jan 21 18:15:04 crc kubenswrapper[4792]: I0121 18:15:04.291637 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-operator-dockercfg-zjnk2" Jan 21 18:15:04 crc kubenswrapper[4792]: I0121 18:15:04.291680 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-bbbc889bc-ftf54"] Jan 21 18:15:04 crc kubenswrapper[4792]: I0121 18:15:04.365541 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gslv9\" (UniqueName: \"kubernetes.io/projected/ed7cf8c1-4937-4143-b230-fdd1c474c3ac-kube-api-access-gslv9\") pod \"smart-gateway-operator-bbbc889bc-ftf54\" (UID: \"ed7cf8c1-4937-4143-b230-fdd1c474c3ac\") " pod="service-telemetry/smart-gateway-operator-bbbc889bc-ftf54" Jan 21 18:15:04 crc kubenswrapper[4792]: I0121 18:15:04.365607 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/ed7cf8c1-4937-4143-b230-fdd1c474c3ac-runner\") pod \"smart-gateway-operator-bbbc889bc-ftf54\" (UID: \"ed7cf8c1-4937-4143-b230-fdd1c474c3ac\") " pod="service-telemetry/smart-gateway-operator-bbbc889bc-ftf54" Jan 21 18:15:04 crc kubenswrapper[4792]: I0121 18:15:04.467276 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gslv9\" (UniqueName: \"kubernetes.io/projected/ed7cf8c1-4937-4143-b230-fdd1c474c3ac-kube-api-access-gslv9\") pod \"smart-gateway-operator-bbbc889bc-ftf54\" (UID: \"ed7cf8c1-4937-4143-b230-fdd1c474c3ac\") " pod="service-telemetry/smart-gateway-operator-bbbc889bc-ftf54" Jan 21 18:15:04 crc kubenswrapper[4792]: I0121 18:15:04.467350 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/ed7cf8c1-4937-4143-b230-fdd1c474c3ac-runner\") pod \"smart-gateway-operator-bbbc889bc-ftf54\" (UID: \"ed7cf8c1-4937-4143-b230-fdd1c474c3ac\") " pod="service-telemetry/smart-gateway-operator-bbbc889bc-ftf54" Jan 21 18:15:04 crc kubenswrapper[4792]: I0121 18:15:04.468146 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/ed7cf8c1-4937-4143-b230-fdd1c474c3ac-runner\") pod \"smart-gateway-operator-bbbc889bc-ftf54\" (UID: \"ed7cf8c1-4937-4143-b230-fdd1c474c3ac\") " pod="service-telemetry/smart-gateway-operator-bbbc889bc-ftf54" Jan 21 18:15:04 crc kubenswrapper[4792]: I0121 18:15:04.501615 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gslv9\" (UniqueName: \"kubernetes.io/projected/ed7cf8c1-4937-4143-b230-fdd1c474c3ac-kube-api-access-gslv9\") pod \"smart-gateway-operator-bbbc889bc-ftf54\" (UID: \"ed7cf8c1-4937-4143-b230-fdd1c474c3ac\") " pod="service-telemetry/smart-gateway-operator-bbbc889bc-ftf54" Jan 21 18:15:04 crc kubenswrapper[4792]: I0121 18:15:04.622567 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-bbbc889bc-ftf54" Jan 21 18:15:04 crc kubenswrapper[4792]: I0121 18:15:04.646790 4792 generic.go:334] "Generic (PLEG): container finished" podID="54e9171e-bf7c-49e7-8fda-1063833badf9" containerID="1ebbf21a82c1d6bb6b63d6324077c5be3847620544afe000ece52ae8bcbb4717" exitCode=0 Jan 21 18:15:04 crc kubenswrapper[4792]: I0121 18:15:04.646980 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-pgpdz" event={"ID":"54e9171e-bf7c-49e7-8fda-1063833badf9","Type":"ContainerDied","Data":"1ebbf21a82c1d6bb6b63d6324077c5be3847620544afe000ece52ae8bcbb4717"} Jan 21 18:15:05 crc kubenswrapper[4792]: I0121 18:15:05.160197 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-bbbc889bc-ftf54"] Jan 21 18:15:05 crc kubenswrapper[4792]: W0121 18:15:05.181099 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded7cf8c1_4937_4143_b230_fdd1c474c3ac.slice/crio-b2aa500ce78546353ff0dbeda9149c80e17c01c69aacb9d66bf5707026075cc5 WatchSource:0}: Error finding container b2aa500ce78546353ff0dbeda9149c80e17c01c69aacb9d66bf5707026075cc5: Status 404 returned error can't find the container with id b2aa500ce78546353ff0dbeda9149c80e17c01c69aacb9d66bf5707026075cc5 Jan 21 18:15:05 crc kubenswrapper[4792]: I0121 18:15:05.656087 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bbbc889bc-ftf54" event={"ID":"ed7cf8c1-4937-4143-b230-fdd1c474c3ac","Type":"ContainerStarted","Data":"b2aa500ce78546353ff0dbeda9149c80e17c01c69aacb9d66bf5707026075cc5"} Jan 21 18:15:05 crc kubenswrapper[4792]: I0121 18:15:05.901348 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-pgpdz" Jan 21 18:15:06 crc kubenswrapper[4792]: I0121 18:15:06.005125 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dp2k8\" (UniqueName: \"kubernetes.io/projected/54e9171e-bf7c-49e7-8fda-1063833badf9-kube-api-access-dp2k8\") pod \"54e9171e-bf7c-49e7-8fda-1063833badf9\" (UID: \"54e9171e-bf7c-49e7-8fda-1063833badf9\") " Jan 21 18:15:06 crc kubenswrapper[4792]: I0121 18:15:06.005216 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/54e9171e-bf7c-49e7-8fda-1063833badf9-config-volume\") pod \"54e9171e-bf7c-49e7-8fda-1063833badf9\" (UID: \"54e9171e-bf7c-49e7-8fda-1063833badf9\") " Jan 21 18:15:06 crc kubenswrapper[4792]: I0121 18:15:06.005389 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/54e9171e-bf7c-49e7-8fda-1063833badf9-secret-volume\") pod \"54e9171e-bf7c-49e7-8fda-1063833badf9\" (UID: \"54e9171e-bf7c-49e7-8fda-1063833badf9\") " Jan 21 18:15:06 crc kubenswrapper[4792]: I0121 18:15:06.007224 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54e9171e-bf7c-49e7-8fda-1063833badf9-config-volume" (OuterVolumeSpecName: "config-volume") pod "54e9171e-bf7c-49e7-8fda-1063833badf9" (UID: "54e9171e-bf7c-49e7-8fda-1063833badf9"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:15:06 crc kubenswrapper[4792]: I0121 18:15:06.019218 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54e9171e-bf7c-49e7-8fda-1063833badf9-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "54e9171e-bf7c-49e7-8fda-1063833badf9" (UID: "54e9171e-bf7c-49e7-8fda-1063833badf9"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:15:06 crc kubenswrapper[4792]: I0121 18:15:06.019350 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54e9171e-bf7c-49e7-8fda-1063833badf9-kube-api-access-dp2k8" (OuterVolumeSpecName: "kube-api-access-dp2k8") pod "54e9171e-bf7c-49e7-8fda-1063833badf9" (UID: "54e9171e-bf7c-49e7-8fda-1063833badf9"). InnerVolumeSpecName "kube-api-access-dp2k8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:15:06 crc kubenswrapper[4792]: I0121 18:15:06.109568 4792 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/54e9171e-bf7c-49e7-8fda-1063833badf9-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 21 18:15:06 crc kubenswrapper[4792]: I0121 18:15:06.109641 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dp2k8\" (UniqueName: \"kubernetes.io/projected/54e9171e-bf7c-49e7-8fda-1063833badf9-kube-api-access-dp2k8\") on node \"crc\" DevicePath \"\"" Jan 21 18:15:06 crc kubenswrapper[4792]: I0121 18:15:06.109656 4792 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/54e9171e-bf7c-49e7-8fda-1063833badf9-config-volume\") on node \"crc\" DevicePath \"\"" Jan 21 18:15:06 crc kubenswrapper[4792]: I0121 18:15:06.707958 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-pgpdz" event={"ID":"54e9171e-bf7c-49e7-8fda-1063833badf9","Type":"ContainerDied","Data":"2f604d7d4692ae4ed6c6d5e52c637e98c4eed3a45dcc75a636b590569851de25"} Jan 21 18:15:06 crc kubenswrapper[4792]: I0121 18:15:06.708418 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2f604d7d4692ae4ed6c6d5e52c637e98c4eed3a45dcc75a636b590569851de25" Jan 21 18:15:06 crc kubenswrapper[4792]: I0121 18:15:06.708494 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-pgpdz" Jan 21 18:15:42 crc kubenswrapper[4792]: E0121 18:15:42.421837 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/infrawatch/service-telemetry-operator:latest" Jan 21 18:15:42 crc kubenswrapper[4792]: E0121 18:15:42.422979 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/infrawatch/service-telemetry-operator:latest,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:WATCH_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.annotations['olm.targetNamespaces'],},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:OPERATOR_NAME,Value:service-telemetry-operator,ValueFrom:nil,},EnvVar{Name:ANSIBLE_GATHERING,Value:explicit,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS_WEBHOOK_SNMP_IMAGE,Value:quay.io/infrawatch/prometheus-webhook-snmp:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OAUTH_PROXY_IMAGE,Value:quay.io/openshift/origin-oauth-proxy:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS_IMAGE,Value:quay.io/prometheus/prometheus:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ALERTMANAGER_IMAGE,Value:quay.io/prometheus/alertmanager:latest,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:service-telemetry-operator.v1.5.1768085182,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:runner,ReadOnly:false,MountPath:/tmp/ansible-operator/runner,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lpgkp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod service-telemetry-operator-55b89ddfb9-5bwxr_service-telemetry(3132bdf7-accb-4e05-833d-7765614688ea): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 21 18:15:42 crc kubenswrapper[4792]: E0121 18:15:42.424216 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/service-telemetry-operator-55b89ddfb9-5bwxr" podUID="3132bdf7-accb-4e05-833d-7765614688ea" Jan 21 18:15:43 crc kubenswrapper[4792]: E0121 18:15:43.026122 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled 
desc = copying config: context canceled" image="quay.io/infrawatch/smart-gateway-operator:latest" Jan 21 18:15:43 crc kubenswrapper[4792]: E0121 18:15:43.026401 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/infrawatch/smart-gateway-operator:latest,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:WATCH_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.annotations['olm.targetNamespaces'],},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:OPERATOR_NAME,Value:smart-gateway-operator,ValueFrom:nil,},EnvVar{Name:ANSIBLE_GATHERING,Value:explicit,ValueFrom:nil,},EnvVar{Name:ANSIBLE_VERBOSITY_SMARTGATEWAY_SMARTGATEWAY_INFRA_WATCH,Value:4,ValueFrom:nil,},EnvVar{Name:ANSIBLE_DEBUG_LOGS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CORE_SMARTGATEWAY_IMAGE,Value:quay.io/infrawatch/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BRIDGE_SMARTGATEWAY_IMAGE,Value:quay.io/infrawatch/sg-bridge:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OAUTH_PROXY_IMAGE,Value:quay.io/openshift/origin-oauth-proxy:latest,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:smart-gateway-operator.v5.0.1768085178,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:runner,ReadOnly:false,MountPath:/tmp/ansible-operator/runner,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gslv9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod smart-gateway-operator-bbbc889bc-ftf54_service-telemetry(ed7cf8c1-4937-4143-b230-fdd1c474c3ac): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 21 18:15:43 crc kubenswrapper[4792]: E0121 18:15:43.028092 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/smart-gateway-operator-bbbc889bc-ftf54" podUID="ed7cf8c1-4937-4143-b230-fdd1c474c3ac" Jan 21 18:15:43 crc kubenswrapper[4792]: I0121 18:15:43.426011 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/interconnect-operator-5bb49f789d-qtp67" event={"ID":"832ffd66-8482-4991-b666-ebc765fc5f8a","Type":"ContainerStarted","Data":"fea6b69309c7445b77291f1d8640af15094c24829e5bc66a0582b1d2bd2e3c70"} Jan 21 18:15:43 crc kubenswrapper[4792]: E0121 18:15:43.428372 
4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/smart-gateway-operator:latest\\\"\"" pod="service-telemetry/smart-gateway-operator-bbbc889bc-ftf54" podUID="ed7cf8c1-4937-4143-b230-fdd1c474c3ac" Jan 21 18:15:43 crc kubenswrapper[4792]: E0121 18:15:43.429561 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/service-telemetry-operator:latest\\\"\"" pod="service-telemetry/service-telemetry-operator-55b89ddfb9-5bwxr" podUID="3132bdf7-accb-4e05-833d-7765614688ea" Jan 21 18:15:43 crc kubenswrapper[4792]: I0121 18:15:43.444563 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/interconnect-operator-5bb49f789d-qtp67" podStartSLOduration=23.158627897 podStartE2EDuration="44.444535447s" podCreationTimestamp="2026-01-21 18:14:59 +0000 UTC" firstStartedPulling="2026-01-21 18:15:00.422188709 +0000 UTC m=+1134.404151895" lastFinishedPulling="2026-01-21 18:15:21.708096259 +0000 UTC m=+1155.690059445" observedRunningTime="2026-01-21 18:15:43.44253298 +0000 UTC m=+1177.424496166" watchObservedRunningTime="2026-01-21 18:15:43.444535447 +0000 UTC m=+1177.426498663" Jan 21 18:15:53 crc kubenswrapper[4792]: I0121 18:15:53.570700 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:15:53 crc kubenswrapper[4792]: I0121 18:15:53.571118 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:15:56 crc kubenswrapper[4792]: I0121 18:15:56.525342 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bbbc889bc-ftf54" event={"ID":"ed7cf8c1-4937-4143-b230-fdd1c474c3ac","Type":"ContainerStarted","Data":"64a5102e48c7cbbf3767b12c80c2e8cc05f9b3190eb67ce395d7445398975fd9"} Jan 21 18:15:56 crc kubenswrapper[4792]: I0121 18:15:56.547604 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/smart-gateway-operator-bbbc889bc-ftf54" podStartSLOduration=2.009065724 podStartE2EDuration="52.547587013s" podCreationTimestamp="2026-01-21 18:15:04 +0000 UTC" firstStartedPulling="2026-01-21 18:15:05.183290785 +0000 UTC m=+1139.165253971" lastFinishedPulling="2026-01-21 18:15:55.721812074 +0000 UTC m=+1189.703775260" observedRunningTime="2026-01-21 18:15:56.546020049 +0000 UTC m=+1190.527983235" watchObservedRunningTime="2026-01-21 18:15:56.547587013 +0000 UTC m=+1190.529550199" Jan 21 18:15:57 crc kubenswrapper[4792]: I0121 18:15:57.538009 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-55b89ddfb9-5bwxr" event={"ID":"3132bdf7-accb-4e05-833d-7765614688ea","Type":"ContainerStarted","Data":"8db8de795db963d8b9b2ed78bf405da237c89f04d7514a5f8987704160727b43"} Jan 21 18:15:57 crc kubenswrapper[4792]: I0121 18:15:57.563518 4792 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="service-telemetry/service-telemetry-operator-55b89ddfb9-5bwxr" podStartSLOduration=1.9123431530000001 podStartE2EDuration="55.563481225s" podCreationTimestamp="2026-01-21 18:15:02 +0000 UTC" firstStartedPulling="2026-01-21 18:15:03.224207397 +0000 UTC m=+1137.206170593" lastFinishedPulling="2026-01-21 18:15:56.875345489 +0000 UTC m=+1190.857308665" observedRunningTime="2026-01-21 18:15:57.557783326 +0000 UTC m=+1191.539746512" watchObservedRunningTime="2026-01-21 18:15:57.563481225 +0000 UTC m=+1191.545444411" Jan 21 18:16:23 crc kubenswrapper[4792]: I0121 18:16:23.570310 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:16:23 crc kubenswrapper[4792]: I0121 18:16:23.571527 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.775224 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-89qv7"] Jan 21 18:16:28 crc kubenswrapper[4792]: E0121 18:16:28.775822 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54e9171e-bf7c-49e7-8fda-1063833badf9" containerName="collect-profiles" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.775840 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="54e9171e-bf7c-49e7-8fda-1063833badf9" containerName="collect-profiles" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.775991 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="54e9171e-bf7c-49e7-8fda-1063833badf9" containerName="collect-profiles" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.776458 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-89qv7" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.778806 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-users" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.778899 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-inter-router-credentials" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.778817 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-openstack-ca" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.779450 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-inter-router-ca" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.779674 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-openstack-credentials" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.783616 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-interconnect-sasl-config" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.789793 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-dockercfg-lspbz" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.804278 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-89qv7"] Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.858547 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/434bbf11-14ad-4d1d-91ba-f389391345ed-sasl-config\") pod \"default-interconnect-68864d46cb-89qv7\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") " pod="service-telemetry/default-interconnect-68864d46cb-89qv7" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.858942 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-89qv7\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") " pod="service-telemetry/default-interconnect-68864d46cb-89qv7" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.858987 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-89qv7\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") " pod="service-telemetry/default-interconnect-68864d46cb-89qv7" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.859013 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-sasl-users\") pod \"default-interconnect-68864d46cb-89qv7\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") " pod="service-telemetry/default-interconnect-68864d46cb-89qv7" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.859112 4792 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-89qv7\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") " pod="service-telemetry/default-interconnect-68864d46cb-89qv7" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.859152 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfzvv\" (UniqueName: \"kubernetes.io/projected/434bbf11-14ad-4d1d-91ba-f389391345ed-kube-api-access-kfzvv\") pod \"default-interconnect-68864d46cb-89qv7\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") " pod="service-telemetry/default-interconnect-68864d46cb-89qv7" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.859224 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-89qv7\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") " pod="service-telemetry/default-interconnect-68864d46cb-89qv7" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.960072 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-89qv7\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") " pod="service-telemetry/default-interconnect-68864d46cb-89qv7" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.960131 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/434bbf11-14ad-4d1d-91ba-f389391345ed-sasl-config\") pod \"default-interconnect-68864d46cb-89qv7\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") " pod="service-telemetry/default-interconnect-68864d46cb-89qv7" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.960185 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-89qv7\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") " pod="service-telemetry/default-interconnect-68864d46cb-89qv7" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.960233 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-89qv7\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") " pod="service-telemetry/default-interconnect-68864d46cb-89qv7" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.960255 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-sasl-users\") pod \"default-interconnect-68864d46cb-89qv7\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") " pod="service-telemetry/default-interconnect-68864d46cb-89qv7" Jan 21 18:16:28 crc kubenswrapper[4792]: 
I0121 18:16:28.960297 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-89qv7\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") " pod="service-telemetry/default-interconnect-68864d46cb-89qv7" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.960332 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfzvv\" (UniqueName: \"kubernetes.io/projected/434bbf11-14ad-4d1d-91ba-f389391345ed-kube-api-access-kfzvv\") pod \"default-interconnect-68864d46cb-89qv7\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") " pod="service-telemetry/default-interconnect-68864d46cb-89qv7" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.961412 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/434bbf11-14ad-4d1d-91ba-f389391345ed-sasl-config\") pod \"default-interconnect-68864d46cb-89qv7\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") " pod="service-telemetry/default-interconnect-68864d46cb-89qv7" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.971130 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-89qv7\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") " pod="service-telemetry/default-interconnect-68864d46cb-89qv7" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.971330 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-89qv7\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") " pod="service-telemetry/default-interconnect-68864d46cb-89qv7" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.972708 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-89qv7\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") " pod="service-telemetry/default-interconnect-68864d46cb-89qv7" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.973055 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-89qv7\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") " pod="service-telemetry/default-interconnect-68864d46cb-89qv7" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.983970 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-sasl-users\") pod \"default-interconnect-68864d46cb-89qv7\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") " pod="service-telemetry/default-interconnect-68864d46cb-89qv7" Jan 21 18:16:28 crc kubenswrapper[4792]: I0121 18:16:28.998896 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-kfzvv\" (UniqueName: \"kubernetes.io/projected/434bbf11-14ad-4d1d-91ba-f389391345ed-kube-api-access-kfzvv\") pod \"default-interconnect-68864d46cb-89qv7\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") " pod="service-telemetry/default-interconnect-68864d46cb-89qv7" Jan 21 18:16:29 crc kubenswrapper[4792]: I0121 18:16:29.099422 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-89qv7" Jan 21 18:16:29 crc kubenswrapper[4792]: I0121 18:16:29.311444 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-89qv7"] Jan 21 18:16:29 crc kubenswrapper[4792]: I0121 18:16:29.316497 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 18:16:29 crc kubenswrapper[4792]: I0121 18:16:29.764194 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-89qv7" event={"ID":"434bbf11-14ad-4d1d-91ba-f389391345ed","Type":"ContainerStarted","Data":"c3b61fcae33c23d2121348bd35f3f24315ae757d6e3dadf10e0e0eefe10553cd"} Jan 21 18:16:41 crc kubenswrapper[4792]: I0121 18:16:41.865005 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-89qv7" event={"ID":"434bbf11-14ad-4d1d-91ba-f389391345ed","Type":"ContainerStarted","Data":"2ab42e4ac198a0139a95a8cacf10aa0830d029cb65fb951c69781ada5b9ea992"} Jan 21 18:16:41 crc kubenswrapper[4792]: I0121 18:16:41.888650 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-interconnect-68864d46cb-89qv7" podStartSLOduration=1.9626316259999999 podStartE2EDuration="13.88862734s" podCreationTimestamp="2026-01-21 18:16:28 +0000 UTC" firstStartedPulling="2026-01-21 18:16:29.316212358 +0000 UTC m=+1223.298175544" lastFinishedPulling="2026-01-21 18:16:41.242208072 +0000 UTC m=+1235.224171258" observedRunningTime="2026-01-21 18:16:41.882827681 +0000 UTC m=+1235.864790867" watchObservedRunningTime="2026-01-21 18:16:41.88862734 +0000 UTC m=+1235.870590546" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.663313 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/prometheus-default-0"] Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.665190 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.668211 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-prometheus-proxy-tls" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.668234 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default-web-config" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.668212 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-default-rulefiles-1" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.668926 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-session-secret" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.669011 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default-tls-assets-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.669089 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-default-rulefiles-2" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.669610 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-stf-dockercfg-c8v2r" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.670367 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.670635 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"serving-certs-ca-bundle" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.679656 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-default-0"] Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.680888 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-default-rulefiles-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.805144 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/6056a47c-877d-43bb-847d-8b48c0156a6a-tls-assets\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.805303 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-default-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/6056a47c-877d-43bb-847d-8b48c0156a6a-prometheus-default-rulefiles-2\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.805447 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/6056a47c-877d-43bb-847d-8b48c0156a6a-config-out\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.805475 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-3e72cfa3-24c8-4ef5-ba99-c4d024867d25\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3e72cfa3-24c8-4ef5-ba99-c4d024867d25\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.805504 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/6056a47c-877d-43bb-847d-8b48c0156a6a-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.805565 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6056a47c-877d-43bb-847d-8b48c0156a6a-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.805584 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6056a47c-877d-43bb-847d-8b48c0156a6a-config\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.805696 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/6056a47c-877d-43bb-847d-8b48c0156a6a-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.805750 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cx4zb\" (UniqueName: \"kubernetes.io/projected/6056a47c-877d-43bb-847d-8b48c0156a6a-kube-api-access-cx4zb\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.805768 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-default-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/6056a47c-877d-43bb-847d-8b48c0156a6a-prometheus-default-rulefiles-1\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.805966 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/6056a47c-877d-43bb-847d-8b48c0156a6a-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.806015 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/6056a47c-877d-43bb-847d-8b48c0156a6a-web-config\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " 
pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.907933 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/6056a47c-877d-43bb-847d-8b48c0156a6a-config-out\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.908320 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-3e72cfa3-24c8-4ef5-ba99-c4d024867d25\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3e72cfa3-24c8-4ef5-ba99-c4d024867d25\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.908458 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/6056a47c-877d-43bb-847d-8b48c0156a6a-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.908578 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6056a47c-877d-43bb-847d-8b48c0156a6a-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.908682 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6056a47c-877d-43bb-847d-8b48c0156a6a-config\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.908792 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/6056a47c-877d-43bb-847d-8b48c0156a6a-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: E0121 18:16:44.908909 4792 secret.go:188] Couldn't get secret service-telemetry/default-prometheus-proxy-tls: secret "default-prometheus-proxy-tls" not found Jan 21 18:16:44 crc kubenswrapper[4792]: E0121 18:16:44.908990 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6056a47c-877d-43bb-847d-8b48c0156a6a-secret-default-prometheus-proxy-tls podName:6056a47c-877d-43bb-847d-8b48c0156a6a nodeName:}" failed. No retries permitted until 2026-01-21 18:16:45.408971333 +0000 UTC m=+1239.390934519 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "secret-default-prometheus-proxy-tls" (UniqueName: "kubernetes.io/secret/6056a47c-877d-43bb-847d-8b48c0156a6a-secret-default-prometheus-proxy-tls") pod "prometheus-default-0" (UID: "6056a47c-877d-43bb-847d-8b48c0156a6a") : secret "default-prometheus-proxy-tls" not found Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.908916 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-default-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/6056a47c-877d-43bb-847d-8b48c0156a6a-prometheus-default-rulefiles-1\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.909103 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cx4zb\" (UniqueName: \"kubernetes.io/projected/6056a47c-877d-43bb-847d-8b48c0156a6a-kube-api-access-cx4zb\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.909233 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/6056a47c-877d-43bb-847d-8b48c0156a6a-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.909274 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/6056a47c-877d-43bb-847d-8b48c0156a6a-web-config\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.909330 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/6056a47c-877d-43bb-847d-8b48c0156a6a-tls-assets\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.909403 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-default-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/6056a47c-877d-43bb-847d-8b48c0156a6a-prometheus-default-rulefiles-2\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.923296 4792 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.923744 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-3e72cfa3-24c8-4ef5-ba99-c4d024867d25\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3e72cfa3-24c8-4ef5-ba99-c4d024867d25\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/7a08b8c286aea9f3c219dcffa66d5ad3b86808fb153afcc083d2eb0a127aad35/globalmount\"" pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.975460 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/6056a47c-877d-43bb-847d-8b48c0156a6a-config-out\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.975989 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6056a47c-877d-43bb-847d-8b48c0156a6a-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.977235 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-default-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/6056a47c-877d-43bb-847d-8b48c0156a6a-prometheus-default-rulefiles-1\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.977308 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/6056a47c-877d-43bb-847d-8b48c0156a6a-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.977586 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-default-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/6056a47c-877d-43bb-847d-8b48c0156a6a-prometheus-default-rulefiles-2\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.979339 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/6056a47c-877d-43bb-847d-8b48c0156a6a-config\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.986115 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/6056a47c-877d-43bb-847d-8b48c0156a6a-web-config\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.986197 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-session-secret\" (UniqueName: 
\"kubernetes.io/secret/6056a47c-877d-43bb-847d-8b48c0156a6a-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.986370 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/6056a47c-877d-43bb-847d-8b48c0156a6a-tls-assets\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:44 crc kubenswrapper[4792]: I0121 18:16:44.986385 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cx4zb\" (UniqueName: \"kubernetes.io/projected/6056a47c-877d-43bb-847d-8b48c0156a6a-kube-api-access-cx4zb\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:45 crc kubenswrapper[4792]: I0121 18:16:45.004331 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-3e72cfa3-24c8-4ef5-ba99-c4d024867d25\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3e72cfa3-24c8-4ef5-ba99-c4d024867d25\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:45 crc kubenswrapper[4792]: I0121 18:16:45.417140 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/6056a47c-877d-43bb-847d-8b48c0156a6a-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:45 crc kubenswrapper[4792]: E0121 18:16:45.417377 4792 secret.go:188] Couldn't get secret service-telemetry/default-prometheus-proxy-tls: secret "default-prometheus-proxy-tls" not found Jan 21 18:16:45 crc kubenswrapper[4792]: E0121 18:16:45.417478 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6056a47c-877d-43bb-847d-8b48c0156a6a-secret-default-prometheus-proxy-tls podName:6056a47c-877d-43bb-847d-8b48c0156a6a nodeName:}" failed. No retries permitted until 2026-01-21 18:16:46.417456191 +0000 UTC m=+1240.399419387 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "secret-default-prometheus-proxy-tls" (UniqueName: "kubernetes.io/secret/6056a47c-877d-43bb-847d-8b48c0156a6a-secret-default-prometheus-proxy-tls") pod "prometheus-default-0" (UID: "6056a47c-877d-43bb-847d-8b48c0156a6a") : secret "default-prometheus-proxy-tls" not found Jan 21 18:16:46 crc kubenswrapper[4792]: I0121 18:16:46.431227 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/6056a47c-877d-43bb-847d-8b48c0156a6a-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:46 crc kubenswrapper[4792]: I0121 18:16:46.434983 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/6056a47c-877d-43bb-847d-8b48c0156a6a-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"6056a47c-877d-43bb-847d-8b48c0156a6a\") " pod="service-telemetry/prometheus-default-0" Jan 21 18:16:46 crc kubenswrapper[4792]: I0121 18:16:46.488514 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-default-0" Jan 21 18:16:46 crc kubenswrapper[4792]: I0121 18:16:46.764577 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-default-0"] Jan 21 18:16:46 crc kubenswrapper[4792]: I0121 18:16:46.916073 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"6056a47c-877d-43bb-847d-8b48c0156a6a","Type":"ContainerStarted","Data":"ea6db8930cdf407d27b1c90c030a9cbe1a760ebea838aaef1db29385a237d009"} Jan 21 18:16:52 crc kubenswrapper[4792]: I0121 18:16:52.960753 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"6056a47c-877d-43bb-847d-8b48c0156a6a","Type":"ContainerStarted","Data":"e9c54c26c826e3d58d0d6946e823f41551865ea4fb87d1d39d1c8986b3184dc8"} Jan 21 18:16:53 crc kubenswrapper[4792]: I0121 18:16:53.571246 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:16:53 crc kubenswrapper[4792]: I0121 18:16:53.571337 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:16:53 crc kubenswrapper[4792]: I0121 18:16:53.571402 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" Jan 21 18:16:53 crc kubenswrapper[4792]: I0121 18:16:53.572302 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"81f61055a15b6c0de20c4bbce1d77d69823c921d5086b61e151446d2bb275b02"} pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 18:16:53 crc kubenswrapper[4792]: 
I0121 18:16:53.572377 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" containerID="cri-o://81f61055a15b6c0de20c4bbce1d77d69823c921d5086b61e151446d2bb275b02" gracePeriod=600 Jan 21 18:16:53 crc kubenswrapper[4792]: I0121 18:16:53.970628 4792 generic.go:334] "Generic (PLEG): container finished" podID="759f2e21-e44e-4049-b262-cb49448e22ab" containerID="81f61055a15b6c0de20c4bbce1d77d69823c921d5086b61e151446d2bb275b02" exitCode=0 Jan 21 18:16:53 crc kubenswrapper[4792]: I0121 18:16:53.970888 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerDied","Data":"81f61055a15b6c0de20c4bbce1d77d69823c921d5086b61e151446d2bb275b02"} Jan 21 18:16:53 crc kubenswrapper[4792]: I0121 18:16:53.971058 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerStarted","Data":"73909500d6fe38233c9a225249850770aaf4af0c3c0a37a13ceb56094bc310c3"} Jan 21 18:16:53 crc kubenswrapper[4792]: I0121 18:16:53.971100 4792 scope.go:117] "RemoveContainer" containerID="51a7f3d84d23321326653dd420e73da4fa0fbd2379dcc7fa479dc2a2a53c626e" Jan 21 18:16:57 crc kubenswrapper[4792]: I0121 18:16:57.211541 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-snmp-webhook-78bcbbdcff-sfzvf"] Jan 21 18:16:57 crc kubenswrapper[4792]: I0121 18:16:57.213551 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-sfzvf" Jan 21 18:16:57 crc kubenswrapper[4792]: I0121 18:16:57.237237 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-snmp-webhook-78bcbbdcff-sfzvf"] Jan 21 18:16:57 crc kubenswrapper[4792]: I0121 18:16:57.369937 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qmzv\" (UniqueName: \"kubernetes.io/projected/21374324-faf0-4c74-864a-8bf6b2819027-kube-api-access-6qmzv\") pod \"default-snmp-webhook-78bcbbdcff-sfzvf\" (UID: \"21374324-faf0-4c74-864a-8bf6b2819027\") " pod="service-telemetry/default-snmp-webhook-78bcbbdcff-sfzvf" Jan 21 18:16:57 crc kubenswrapper[4792]: I0121 18:16:57.472645 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qmzv\" (UniqueName: \"kubernetes.io/projected/21374324-faf0-4c74-864a-8bf6b2819027-kube-api-access-6qmzv\") pod \"default-snmp-webhook-78bcbbdcff-sfzvf\" (UID: \"21374324-faf0-4c74-864a-8bf6b2819027\") " pod="service-telemetry/default-snmp-webhook-78bcbbdcff-sfzvf" Jan 21 18:16:57 crc kubenswrapper[4792]: I0121 18:16:57.491627 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qmzv\" (UniqueName: \"kubernetes.io/projected/21374324-faf0-4c74-864a-8bf6b2819027-kube-api-access-6qmzv\") pod \"default-snmp-webhook-78bcbbdcff-sfzvf\" (UID: \"21374324-faf0-4c74-864a-8bf6b2819027\") " pod="service-telemetry/default-snmp-webhook-78bcbbdcff-sfzvf" Jan 21 18:16:57 crc kubenswrapper[4792]: I0121 18:16:57.542540 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-sfzvf" Jan 21 18:16:57 crc kubenswrapper[4792]: I0121 18:16:57.786933 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-snmp-webhook-78bcbbdcff-sfzvf"] Jan 21 18:16:58 crc kubenswrapper[4792]: I0121 18:16:58.007403 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-sfzvf" event={"ID":"21374324-faf0-4c74-864a-8bf6b2819027","Type":"ContainerStarted","Data":"90335e5737c0450d201541628911b87367bc33f231840d422491403f31b5fac8"} Jan 21 18:17:03 crc kubenswrapper[4792]: I0121 18:17:03.371325 4792 generic.go:334] "Generic (PLEG): container finished" podID="6056a47c-877d-43bb-847d-8b48c0156a6a" containerID="e9c54c26c826e3d58d0d6946e823f41551865ea4fb87d1d39d1c8986b3184dc8" exitCode=0 Jan 21 18:17:03 crc kubenswrapper[4792]: I0121 18:17:03.371395 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"6056a47c-877d-43bb-847d-8b48c0156a6a","Type":"ContainerDied","Data":"e9c54c26c826e3d58d0d6946e823f41551865ea4fb87d1d39d1c8986b3184dc8"} Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.437523 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/alertmanager-default-0"] Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.440067 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/alertmanager-default-0" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.444254 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-web-config" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.444549 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-alertmanager-proxy-tls" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.444641 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-generated" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.444772 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-tls-assets-0" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.445706 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-stf-dockercfg-x9xdb" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.451765 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-cluster-tls-config" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.456334 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/alertmanager-default-0"] Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.628354 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-80c0d296-6d62-40eb-9121-938917dc9e29\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-80c0d296-6d62-40eb-9121-938917dc9e29\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.628416 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/e8fa9d25-e034-4893-839b-70f33ad44b2e-config-volume\") pod \"alertmanager-default-0\" (UID: 
\"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.628435 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/e8fa9d25-e034-4893-839b-70f33ad44b2e-cluster-tls-config\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.628471 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/e8fa9d25-e034-4893-839b-70f33ad44b2e-config-out\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.628501 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/e8fa9d25-e034-4893-839b-70f33ad44b2e-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.628519 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/e8fa9d25-e034-4893-839b-70f33ad44b2e-tls-assets\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.628550 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/e8fa9d25-e034-4893-839b-70f33ad44b2e-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.628576 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/e8fa9d25-e034-4893-839b-70f33ad44b2e-web-config\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.628615 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bl88j\" (UniqueName: \"kubernetes.io/projected/e8fa9d25-e034-4893-839b-70f33ad44b2e-kube-api-access-bl88j\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.729538 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/e8fa9d25-e034-4893-839b-70f33ad44b2e-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.729594 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"tls-assets\" (UniqueName: \"kubernetes.io/projected/e8fa9d25-e034-4893-839b-70f33ad44b2e-tls-assets\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.729617 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/e8fa9d25-e034-4893-839b-70f33ad44b2e-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.729641 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/e8fa9d25-e034-4893-839b-70f33ad44b2e-web-config\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.729675 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bl88j\" (UniqueName: \"kubernetes.io/projected/e8fa9d25-e034-4893-839b-70f33ad44b2e-kube-api-access-bl88j\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.729737 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-80c0d296-6d62-40eb-9121-938917dc9e29\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-80c0d296-6d62-40eb-9121-938917dc9e29\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.729754 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/e8fa9d25-e034-4893-839b-70f33ad44b2e-cluster-tls-config\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.729771 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/e8fa9d25-e034-4893-839b-70f33ad44b2e-config-volume\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0" Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.729804 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/e8fa9d25-e034-4893-839b-70f33ad44b2e-config-out\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0" Jan 21 18:17:05 crc kubenswrapper[4792]: E0121 18:17:05.729919 4792 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" not found Jan 21 18:17:05 crc kubenswrapper[4792]: E0121 18:17:05.730051 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e8fa9d25-e034-4893-839b-70f33ad44b2e-secret-default-alertmanager-proxy-tls podName:e8fa9d25-e034-4893-839b-70f33ad44b2e nodeName:}" failed. 
Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.737269 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/e8fa9d25-e034-4893-839b-70f33ad44b2e-tls-assets\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0"
Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.737314 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/e8fa9d25-e034-4893-839b-70f33ad44b2e-web-config\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0"
Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.737447 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/e8fa9d25-e034-4893-839b-70f33ad44b2e-config-volume\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0"
Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.742476 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/e8fa9d25-e034-4893-839b-70f33ad44b2e-cluster-tls-config\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0"
Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.743289 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/e8fa9d25-e034-4893-839b-70f33ad44b2e-config-out\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0"
Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.745538 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/e8fa9d25-e034-4893-839b-70f33ad44b2e-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0"
Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.753322 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bl88j\" (UniqueName: \"kubernetes.io/projected/e8fa9d25-e034-4893-839b-70f33ad44b2e-kube-api-access-bl88j\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0"
Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.759908 4792 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
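
The csi_attacher.go entry explains why there is no staging step for the PVC: the kubevirt.io.hostpath-provisioner node plugin does not advertise the STAGE_UNSTAGE_VOLUME capability, so the kubelet skips NodeStageVolume and publishes the volume directly (the MountDevice/SetUp successes that follow). A sketch of that capability probe against the CSI node service (connection setup and error handling elided; conn is an assumed gRPC connection to the plugin's node socket):

    package csiprobe

    import (
        "context"

        csi "github.com/container-storage-interface/spec/lib/go/csi"
        "google.golang.org/grpc"
    )

    // supportsStageUnstage reports whether the node plugin advertises
    // STAGE_UNSTAGE_VOLUME; when it does not, the kubelet logs "Skipping
    // MountDevice..." and calls NodePublishVolume directly.
    func supportsStageUnstage(ctx context.Context, conn *grpc.ClientConn) (bool, error) {
        resp, err := csi.NewNodeClient(conn).NodeGetCapabilities(ctx, &csi.NodeGetCapabilitiesRequest{})
        if err != nil {
            return false, err
        }
        for _, c := range resp.GetCapabilities() {
            if c.GetRpc().GetType() == csi.NodeServiceCapability_RPC_STAGE_UNSTAGE_VOLUME {
                return true, nil // stage to a global mount point first
            }
        }
        return false, nil // skip MountDevice, publish directly (this log's path)
    }
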
Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.759996 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-80c0d296-6d62-40eb-9121-938917dc9e29\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-80c0d296-6d62-40eb-9121-938917dc9e29\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/9056d05eb64909d8efd8577152a55a91a1e62b062048891c98d5f5dbd150b319/globalmount\"" pod="service-telemetry/alertmanager-default-0"
Jan 21 18:17:05 crc kubenswrapper[4792]: I0121 18:17:05.799009 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-80c0d296-6d62-40eb-9121-938917dc9e29\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-80c0d296-6d62-40eb-9121-938917dc9e29\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0"
Jan 21 18:17:06 crc kubenswrapper[4792]: I0121 18:17:06.383392 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/e8fa9d25-e034-4893-839b-70f33ad44b2e-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0"
Jan 21 18:17:06 crc kubenswrapper[4792]: E0121 18:17:06.383638 4792 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" not found
Jan 21 18:17:06 crc kubenswrapper[4792]: E0121 18:17:06.383700 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e8fa9d25-e034-4893-839b-70f33ad44b2e-secret-default-alertmanager-proxy-tls podName:e8fa9d25-e034-4893-839b-70f33ad44b2e nodeName:}" failed. No retries permitted until 2026-01-21 18:17:07.383681874 +0000 UTC m=+1261.365645060 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "secret-default-alertmanager-proxy-tls" (UniqueName: "kubernetes.io/secret/e8fa9d25-e034-4893-839b-70f33ad44b2e-secret-default-alertmanager-proxy-tls") pod "alertmanager-default-0" (UID: "e8fa9d25-e034-4893-839b-70f33ad44b2e") : secret "default-alertmanager-proxy-tls" not found
Jan 21 18:17:07 crc kubenswrapper[4792]: I0121 18:17:07.467685 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/e8fa9d25-e034-4893-839b-70f33ad44b2e-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0"
Jan 21 18:17:07 crc kubenswrapper[4792]: I0121 18:17:07.477627 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/e8fa9d25-e034-4893-839b-70f33ad44b2e-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"e8fa9d25-e034-4893-839b-70f33ad44b2e\") " pod="service-telemetry/alertmanager-default-0"
Jan 21 18:17:07 crc kubenswrapper[4792]: I0121 18:17:07.562012 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-stf-dockercfg-x9xdb"
Jan 21 18:17:07 crc kubenswrapper[4792]: I0121 18:17:07.570689 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/alertmanager-default-0" Jan 21 18:17:14 crc kubenswrapper[4792]: E0121 18:17:14.983467 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/infrawatch/prometheus-webhook-snmp:latest" Jan 21 18:17:14 crc kubenswrapper[4792]: E0121 18:17:14.984051 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-webhook-snmp,Image:quay.io/infrawatch/prometheus-webhook-snmp:latest,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:,HostPort:0,ContainerPort:9099,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:SNMP_COMMUNITY,Value:public,ValueFrom:nil,},EnvVar{Name:SNMP_RETRIES,Value:5,ValueFrom:nil,},EnvVar{Name:SNMP_HOST,Value:192.168.24.254,ValueFrom:nil,},EnvVar{Name:SNMP_PORT,Value:162,ValueFrom:nil,},EnvVar{Name:SNMP_TIMEOUT,Value:1,ValueFrom:nil,},EnvVar{Name:ALERT_OID_LABEL,Value:oid,ValueFrom:nil,},EnvVar{Name:TRAP_OID_PREFIX,Value:1.3.6.1.4.1.50495.15,ValueFrom:nil,},EnvVar{Name:TRAP_DEFAULT_OID,Value:1.3.6.1.4.1.50495.15.1.2.1,ValueFrom:nil,},EnvVar{Name:TRAP_DEFAULT_SEVERITY,Value:,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6qmzv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod default-snmp-webhook-78bcbbdcff-sfzvf_service-telemetry(21374324-faf0-4c74-864a-8bf6b2819027): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 21 18:17:14 crc kubenswrapper[4792]: E0121 18:17:14.985775 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-webhook-snmp\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-sfzvf" podUID="21374324-faf0-4c74-864a-8bf6b2819027" Jan 21 18:17:15 crc kubenswrapper[4792]: I0121 18:17:15.610886 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/alertmanager-default-0"] Jan 21 18:17:15 crc kubenswrapper[4792]: E0121 18:17:15.687031 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-webhook-snmp\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/prometheus-webhook-snmp:latest\\\"\"" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-sfzvf" podUID="21374324-faf0-4c74-864a-8bf6b2819027" Jan 21 18:17:16 crc kubenswrapper[4792]: W0121 18:17:16.087275 4792 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode8fa9d25_e034_4893_839b_70f33ad44b2e.slice/crio-e513b47376d6a1e7bbaad3b6091b229786481d049793302a6bde9c99c3cfc5c4 WatchSource:0}: Error finding container e513b47376d6a1e7bbaad3b6091b229786481d049793302a6bde9c99c3cfc5c4: Status 404 returned error can't find the container with id e513b47376d6a1e7bbaad3b6091b229786481d049793302a6bde9c99c3cfc5c4 Jan 21 18:17:16 crc kubenswrapper[4792]: I0121 18:17:16.686560 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"e8fa9d25-e034-4893-839b-70f33ad44b2e","Type":"ContainerStarted","Data":"e513b47376d6a1e7bbaad3b6091b229786481d049793302a6bde9c99c3cfc5c4"} Jan 21 18:17:19 crc kubenswrapper[4792]: I0121 18:17:19.809969 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"e8fa9d25-e034-4893-839b-70f33ad44b2e","Type":"ContainerStarted","Data":"45c8428bbfc3657d70e14757f6e8251de4d1e835d28830e0ab4f3080593cadb7"} Jan 21 18:17:25 crc kubenswrapper[4792]: I0121 18:17:25.659814 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8"] Jan 21 18:17:25 crc kubenswrapper[4792]: I0121 18:17:25.662129 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" Jan 21 18:17:25 crc kubenswrapper[4792]: I0121 18:17:25.667047 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-coll-meter-sg-core-configmap" Jan 21 18:17:25 crc kubenswrapper[4792]: I0121 18:17:25.667090 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-session-secret" Jan 21 18:17:25 crc kubenswrapper[4792]: I0121 18:17:25.667571 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-dockercfg-26lkj" Jan 21 18:17:25 crc kubenswrapper[4792]: I0121 18:17:25.679431 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-coll-meter-proxy-tls" Jan 21 18:17:25 crc kubenswrapper[4792]: I0121 18:17:25.680746 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8"] Jan 21 18:17:25 crc kubenswrapper[4792]: I0121 18:17:25.746588 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/5ed633e5-26cf-469c-b1b2-250229e6d602-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8\" (UID: \"5ed633e5-26cf-469c-b1b2-250229e6d602\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" Jan 21 18:17:25 crc kubenswrapper[4792]: I0121 18:17:25.746642 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/5ed633e5-26cf-469c-b1b2-250229e6d602-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8\" (UID: \"5ed633e5-26cf-469c-b1b2-250229e6d602\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" Jan 21 18:17:25 crc kubenswrapper[4792]: I0121 18:17:25.746686 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" 
(UniqueName: \"kubernetes.io/secret/5ed633e5-26cf-469c-b1b2-250229e6d602-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8\" (UID: \"5ed633e5-26cf-469c-b1b2-250229e6d602\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" Jan 21 18:17:25 crc kubenswrapper[4792]: I0121 18:17:25.746714 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/5ed633e5-26cf-469c-b1b2-250229e6d602-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8\" (UID: \"5ed633e5-26cf-469c-b1b2-250229e6d602\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" Jan 21 18:17:25 crc kubenswrapper[4792]: I0121 18:17:25.746786 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tttgp\" (UniqueName: \"kubernetes.io/projected/5ed633e5-26cf-469c-b1b2-250229e6d602-kube-api-access-tttgp\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8\" (UID: \"5ed633e5-26cf-469c-b1b2-250229e6d602\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" Jan 21 18:17:25 crc kubenswrapper[4792]: I0121 18:17:25.848034 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/5ed633e5-26cf-469c-b1b2-250229e6d602-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8\" (UID: \"5ed633e5-26cf-469c-b1b2-250229e6d602\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" Jan 21 18:17:25 crc kubenswrapper[4792]: I0121 18:17:25.848121 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/5ed633e5-26cf-469c-b1b2-250229e6d602-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8\" (UID: \"5ed633e5-26cf-469c-b1b2-250229e6d602\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" Jan 21 18:17:25 crc kubenswrapper[4792]: I0121 18:17:25.848157 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5ed633e5-26cf-469c-b1b2-250229e6d602-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8\" (UID: \"5ed633e5-26cf-469c-b1b2-250229e6d602\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" Jan 21 18:17:25 crc kubenswrapper[4792]: I0121 18:17:25.848201 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/5ed633e5-26cf-469c-b1b2-250229e6d602-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8\" (UID: \"5ed633e5-26cf-469c-b1b2-250229e6d602\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" Jan 21 18:17:25 crc kubenswrapper[4792]: I0121 18:17:25.848272 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tttgp\" (UniqueName: \"kubernetes.io/projected/5ed633e5-26cf-469c-b1b2-250229e6d602-kube-api-access-tttgp\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8\" (UID: \"5ed633e5-26cf-469c-b1b2-250229e6d602\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" 
Jan 21 18:17:25 crc kubenswrapper[4792]: E0121 18:17:25.848613 4792 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-coll-meter-proxy-tls: secret "default-cloud1-coll-meter-proxy-tls" not found Jan 21 18:17:25 crc kubenswrapper[4792]: I0121 18:17:25.848728 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/5ed633e5-26cf-469c-b1b2-250229e6d602-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8\" (UID: \"5ed633e5-26cf-469c-b1b2-250229e6d602\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" Jan 21 18:17:25 crc kubenswrapper[4792]: E0121 18:17:25.848941 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5ed633e5-26cf-469c-b1b2-250229e6d602-default-cloud1-coll-meter-proxy-tls podName:5ed633e5-26cf-469c-b1b2-250229e6d602 nodeName:}" failed. No retries permitted until 2026-01-21 18:17:26.348919169 +0000 UTC m=+1280.330882355 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "default-cloud1-coll-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/5ed633e5-26cf-469c-b1b2-250229e6d602-default-cloud1-coll-meter-proxy-tls") pod "default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" (UID: "5ed633e5-26cf-469c-b1b2-250229e6d602") : secret "default-cloud1-coll-meter-proxy-tls" not found Jan 21 18:17:25 crc kubenswrapper[4792]: I0121 18:17:25.849455 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/5ed633e5-26cf-469c-b1b2-250229e6d602-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8\" (UID: \"5ed633e5-26cf-469c-b1b2-250229e6d602\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" Jan 21 18:17:25 crc kubenswrapper[4792]: I0121 18:17:25.858772 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/5ed633e5-26cf-469c-b1b2-250229e6d602-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8\" (UID: \"5ed633e5-26cf-469c-b1b2-250229e6d602\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" Jan 21 18:17:25 crc kubenswrapper[4792]: I0121 18:17:25.884943 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tttgp\" (UniqueName: \"kubernetes.io/projected/5ed633e5-26cf-469c-b1b2-250229e6d602-kube-api-access-tttgp\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8\" (UID: \"5ed633e5-26cf-469c-b1b2-250229e6d602\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" Jan 21 18:17:26 crc kubenswrapper[4792]: I0121 18:17:26.354577 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5ed633e5-26cf-469c-b1b2-250229e6d602-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8\" (UID: \"5ed633e5-26cf-469c-b1b2-250229e6d602\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" Jan 21 18:17:26 crc kubenswrapper[4792]: E0121 18:17:26.354757 4792 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-coll-meter-proxy-tls: secret "default-cloud1-coll-meter-proxy-tls" not found Jan 21 18:17:26 crc kubenswrapper[4792]: E0121 18:17:26.354834 4792 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/secret/5ed633e5-26cf-469c-b1b2-250229e6d602-default-cloud1-coll-meter-proxy-tls podName:5ed633e5-26cf-469c-b1b2-250229e6d602 nodeName:}" failed. No retries permitted until 2026-01-21 18:17:27.354816006 +0000 UTC m=+1281.336779192 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "default-cloud1-coll-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/5ed633e5-26cf-469c-b1b2-250229e6d602-default-cloud1-coll-meter-proxy-tls") pod "default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" (UID: "5ed633e5-26cf-469c-b1b2-250229e6d602") : secret "default-cloud1-coll-meter-proxy-tls" not found Jan 21 18:17:27 crc kubenswrapper[4792]: I0121 18:17:27.368918 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5ed633e5-26cf-469c-b1b2-250229e6d602-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8\" (UID: \"5ed633e5-26cf-469c-b1b2-250229e6d602\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" Jan 21 18:17:27 crc kubenswrapper[4792]: I0121 18:17:27.374298 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5ed633e5-26cf-469c-b1b2-250229e6d602-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8\" (UID: \"5ed633e5-26cf-469c-b1b2-250229e6d602\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" Jan 21 18:17:27 crc kubenswrapper[4792]: I0121 18:17:27.515924 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" Jan 21 18:17:28 crc kubenswrapper[4792]: I0121 18:17:28.318212 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs"] Jan 21 18:17:28 crc kubenswrapper[4792]: I0121 18:17:28.319645 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" Jan 21 18:17:28 crc kubenswrapper[4792]: I0121 18:17:28.322349 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-ceil-meter-sg-core-configmap" Jan 21 18:17:28 crc kubenswrapper[4792]: I0121 18:17:28.327335 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-ceil-meter-proxy-tls" Jan 21 18:17:28 crc kubenswrapper[4792]: I0121 18:17:28.331462 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs"] Jan 21 18:17:28 crc kubenswrapper[4792]: I0121 18:17:28.399941 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/3a86979f-5891-42c1-89ce-6b0aefd1af14-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs\" (UID: \"3a86979f-5891-42c1-89ce-6b0aefd1af14\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" Jan 21 18:17:28 crc kubenswrapper[4792]: I0121 18:17:28.400025 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x86hm\" (UniqueName: \"kubernetes.io/projected/3a86979f-5891-42c1-89ce-6b0aefd1af14-kube-api-access-x86hm\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs\" (UID: \"3a86979f-5891-42c1-89ce-6b0aefd1af14\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" Jan 21 18:17:28 crc kubenswrapper[4792]: I0121 18:17:28.400066 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/3a86979f-5891-42c1-89ce-6b0aefd1af14-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs\" (UID: \"3a86979f-5891-42c1-89ce-6b0aefd1af14\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" Jan 21 18:17:28 crc kubenswrapper[4792]: I0121 18:17:28.400093 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/3a86979f-5891-42c1-89ce-6b0aefd1af14-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs\" (UID: \"3a86979f-5891-42c1-89ce-6b0aefd1af14\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" Jan 21 18:17:28 crc kubenswrapper[4792]: I0121 18:17:28.400156 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/3a86979f-5891-42c1-89ce-6b0aefd1af14-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs\" (UID: \"3a86979f-5891-42c1-89ce-6b0aefd1af14\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" Jan 21 18:17:28 crc kubenswrapper[4792]: I0121 18:17:28.501298 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/3a86979f-5891-42c1-89ce-6b0aefd1af14-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs\" (UID: \"3a86979f-5891-42c1-89ce-6b0aefd1af14\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" Jan 21 18:17:28 crc kubenswrapper[4792]: 
I0121 18:17:28.501397 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x86hm\" (UniqueName: \"kubernetes.io/projected/3a86979f-5891-42c1-89ce-6b0aefd1af14-kube-api-access-x86hm\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs\" (UID: \"3a86979f-5891-42c1-89ce-6b0aefd1af14\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" Jan 21 18:17:28 crc kubenswrapper[4792]: I0121 18:17:28.501428 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/3a86979f-5891-42c1-89ce-6b0aefd1af14-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs\" (UID: \"3a86979f-5891-42c1-89ce-6b0aefd1af14\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" Jan 21 18:17:28 crc kubenswrapper[4792]: I0121 18:17:28.501446 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/3a86979f-5891-42c1-89ce-6b0aefd1af14-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs\" (UID: \"3a86979f-5891-42c1-89ce-6b0aefd1af14\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" Jan 21 18:17:28 crc kubenswrapper[4792]: I0121 18:17:28.501476 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/3a86979f-5891-42c1-89ce-6b0aefd1af14-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs\" (UID: \"3a86979f-5891-42c1-89ce-6b0aefd1af14\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" Jan 21 18:17:28 crc kubenswrapper[4792]: E0121 18:17:28.502101 4792 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-ceil-meter-proxy-tls: secret "default-cloud1-ceil-meter-proxy-tls" not found Jan 21 18:17:28 crc kubenswrapper[4792]: E0121 18:17:28.502266 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3a86979f-5891-42c1-89ce-6b0aefd1af14-default-cloud1-ceil-meter-proxy-tls podName:3a86979f-5891-42c1-89ce-6b0aefd1af14 nodeName:}" failed. No retries permitted until 2026-01-21 18:17:29.002243103 +0000 UTC m=+1282.984206299 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "default-cloud1-ceil-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/3a86979f-5891-42c1-89ce-6b0aefd1af14-default-cloud1-ceil-meter-proxy-tls") pod "default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" (UID: "3a86979f-5891-42c1-89ce-6b0aefd1af14") : secret "default-cloud1-ceil-meter-proxy-tls" not found Jan 21 18:17:28 crc kubenswrapper[4792]: I0121 18:17:28.503092 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/3a86979f-5891-42c1-89ce-6b0aefd1af14-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs\" (UID: \"3a86979f-5891-42c1-89ce-6b0aefd1af14\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" Jan 21 18:17:28 crc kubenswrapper[4792]: I0121 18:17:28.503272 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/3a86979f-5891-42c1-89ce-6b0aefd1af14-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs\" (UID: \"3a86979f-5891-42c1-89ce-6b0aefd1af14\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" Jan 21 18:17:28 crc kubenswrapper[4792]: I0121 18:17:28.530220 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x86hm\" (UniqueName: \"kubernetes.io/projected/3a86979f-5891-42c1-89ce-6b0aefd1af14-kube-api-access-x86hm\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs\" (UID: \"3a86979f-5891-42c1-89ce-6b0aefd1af14\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" Jan 21 18:17:28 crc kubenswrapper[4792]: I0121 18:17:28.533639 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/3a86979f-5891-42c1-89ce-6b0aefd1af14-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs\" (UID: \"3a86979f-5891-42c1-89ce-6b0aefd1af14\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" Jan 21 18:17:29 crc kubenswrapper[4792]: I0121 18:17:29.012380 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8"] Jan 21 18:17:29 crc kubenswrapper[4792]: I0121 18:17:29.013323 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/3a86979f-5891-42c1-89ce-6b0aefd1af14-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs\" (UID: \"3a86979f-5891-42c1-89ce-6b0aefd1af14\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" Jan 21 18:17:29 crc kubenswrapper[4792]: E0121 18:17:29.013427 4792 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-ceil-meter-proxy-tls: secret "default-cloud1-ceil-meter-proxy-tls" not found Jan 21 18:17:29 crc kubenswrapper[4792]: E0121 18:17:29.015352 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3a86979f-5891-42c1-89ce-6b0aefd1af14-default-cloud1-ceil-meter-proxy-tls podName:3a86979f-5891-42c1-89ce-6b0aefd1af14 nodeName:}" failed. No retries permitted until 2026-01-21 18:17:30.015335507 +0000 UTC m=+1283.997298693 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "default-cloud1-ceil-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/3a86979f-5891-42c1-89ce-6b0aefd1af14-default-cloud1-ceil-meter-proxy-tls") pod "default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" (UID: "3a86979f-5891-42c1-89ce-6b0aefd1af14") : secret "default-cloud1-ceil-meter-proxy-tls" not found Jan 21 18:17:29 crc kubenswrapper[4792]: W0121 18:17:29.023384 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ed633e5_26cf_469c_b1b2_250229e6d602.slice/crio-b7d65c4d8ee9e9812eb4efc34f343db11b535847ca73e491dd0d716a5a4bba41 WatchSource:0}: Error finding container b7d65c4d8ee9e9812eb4efc34f343db11b535847ca73e491dd0d716a5a4bba41: Status 404 returned error can't find the container with id b7d65c4d8ee9e9812eb4efc34f343db11b535847ca73e491dd0d716a5a4bba41 Jan 21 18:17:29 crc kubenswrapper[4792]: I0121 18:17:29.930636 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"6056a47c-877d-43bb-847d-8b48c0156a6a","Type":"ContainerStarted","Data":"b9497ebbf19170ef889cf11beb108df7b8dca977096854d38ef4570dc59985a8"} Jan 21 18:17:29 crc kubenswrapper[4792]: I0121 18:17:29.932509 4792 generic.go:334] "Generic (PLEG): container finished" podID="e8fa9d25-e034-4893-839b-70f33ad44b2e" containerID="45c8428bbfc3657d70e14757f6e8251de4d1e835d28830e0ab4f3080593cadb7" exitCode=0 Jan 21 18:17:29 crc kubenswrapper[4792]: I0121 18:17:29.932566 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"e8fa9d25-e034-4893-839b-70f33ad44b2e","Type":"ContainerDied","Data":"45c8428bbfc3657d70e14757f6e8251de4d1e835d28830e0ab4f3080593cadb7"} Jan 21 18:17:29 crc kubenswrapper[4792]: I0121 18:17:29.940256 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" event={"ID":"5ed633e5-26cf-469c-b1b2-250229e6d602","Type":"ContainerStarted","Data":"b7d65c4d8ee9e9812eb4efc34f343db11b535847ca73e491dd0d716a5a4bba41"} Jan 21 18:17:29 crc kubenswrapper[4792]: I0121 18:17:29.946563 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-sfzvf" event={"ID":"21374324-faf0-4c74-864a-8bf6b2819027","Type":"ContainerStarted","Data":"97d358ccb86edbce6912c862a5d108ae16b892f86dd0e60b049cef48e7413ad5"} Jan 21 18:17:29 crc kubenswrapper[4792]: I0121 18:17:29.998728 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-sfzvf" podStartSLOduration=1.923664306 podStartE2EDuration="32.998700327s" podCreationTimestamp="2026-01-21 18:16:57 +0000 UTC" firstStartedPulling="2026-01-21 18:16:57.814691313 +0000 UTC m=+1251.796654499" lastFinishedPulling="2026-01-21 18:17:28.889727334 +0000 UTC m=+1282.871690520" observedRunningTime="2026-01-21 18:17:29.996286091 +0000 UTC m=+1283.978249277" watchObservedRunningTime="2026-01-21 18:17:29.998700327 +0000 UTC m=+1283.980663513" Jan 21 18:17:30 crc kubenswrapper[4792]: I0121 18:17:30.022625 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/3a86979f-5891-42c1-89ce-6b0aefd1af14-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs\" (UID: \"3a86979f-5891-42c1-89ce-6b0aefd1af14\") " 
pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" Jan 21 18:17:30 crc kubenswrapper[4792]: I0121 18:17:30.046230 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/3a86979f-5891-42c1-89ce-6b0aefd1af14-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs\" (UID: \"3a86979f-5891-42c1-89ce-6b0aefd1af14\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" Jan 21 18:17:30 crc kubenswrapper[4792]: I0121 18:17:30.139638 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" Jan 21 18:17:30 crc kubenswrapper[4792]: I0121 18:17:30.626047 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs"] Jan 21 18:17:30 crc kubenswrapper[4792]: W0121 18:17:30.767810 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3a86979f_5891_42c1_89ce_6b0aefd1af14.slice/crio-33af5db0bc5855388fc64fb309b5e5b00b153ec4a1930070382cd71742836aa3 WatchSource:0}: Error finding container 33af5db0bc5855388fc64fb309b5e5b00b153ec4a1930070382cd71742836aa3: Status 404 returned error can't find the container with id 33af5db0bc5855388fc64fb309b5e5b00b153ec4a1930070382cd71742836aa3 Jan 21 18:17:30 crc kubenswrapper[4792]: I0121 18:17:30.963624 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" event={"ID":"3a86979f-5891-42c1-89ce-6b0aefd1af14","Type":"ContainerStarted","Data":"33af5db0bc5855388fc64fb309b5e5b00b153ec4a1930070382cd71742836aa3"} Jan 21 18:17:32 crc kubenswrapper[4792]: I0121 18:17:32.104574 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"6056a47c-877d-43bb-847d-8b48c0156a6a","Type":"ContainerStarted","Data":"9073891cc06703d1bf0248d108661929fb5383fe406ee5ffa5f5cf797243f2d2"} Jan 21 18:17:35 crc kubenswrapper[4792]: I0121 18:17:35.045313 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj"] Jan 21 18:17:35 crc kubenswrapper[4792]: I0121 18:17:35.053138 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" Jan 21 18:17:35 crc kubenswrapper[4792]: I0121 18:17:35.056386 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-sens-meter-proxy-tls" Jan 21 18:17:35 crc kubenswrapper[4792]: I0121 18:17:35.057227 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-sens-meter-sg-core-configmap" Jan 21 18:17:35 crc kubenswrapper[4792]: I0121 18:17:35.064330 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj"] Jan 21 18:17:35 crc kubenswrapper[4792]: I0121 18:17:35.175156 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/e991533b-8af1-438b-99fb-31ed1b1db4d9-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj\" (UID: \"e991533b-8af1-438b-99fb-31ed1b1db4d9\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" Jan 21 18:17:35 crc kubenswrapper[4792]: I0121 18:17:35.175228 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/e991533b-8af1-438b-99fb-31ed1b1db4d9-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj\" (UID: \"e991533b-8af1-438b-99fb-31ed1b1db4d9\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" Jan 21 18:17:35 crc kubenswrapper[4792]: I0121 18:17:35.175258 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-442cm\" (UniqueName: \"kubernetes.io/projected/e991533b-8af1-438b-99fb-31ed1b1db4d9-kube-api-access-442cm\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj\" (UID: \"e991533b-8af1-438b-99fb-31ed1b1db4d9\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" Jan 21 18:17:35 crc kubenswrapper[4792]: I0121 18:17:35.175302 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/e991533b-8af1-438b-99fb-31ed1b1db4d9-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj\" (UID: \"e991533b-8af1-438b-99fb-31ed1b1db4d9\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" Jan 21 18:17:35 crc kubenswrapper[4792]: I0121 18:17:35.176256 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/e991533b-8af1-438b-99fb-31ed1b1db4d9-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj\" (UID: \"e991533b-8af1-438b-99fb-31ed1b1db4d9\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" Jan 21 18:17:35 crc kubenswrapper[4792]: I0121 18:17:35.277738 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/e991533b-8af1-438b-99fb-31ed1b1db4d9-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj\" (UID: \"e991533b-8af1-438b-99fb-31ed1b1db4d9\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" Jan 21 18:17:35 crc 
kubenswrapper[4792]: I0121 18:17:35.278084 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/e991533b-8af1-438b-99fb-31ed1b1db4d9-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj\" (UID: \"e991533b-8af1-438b-99fb-31ed1b1db4d9\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" Jan 21 18:17:35 crc kubenswrapper[4792]: I0121 18:17:35.278192 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/e991533b-8af1-438b-99fb-31ed1b1db4d9-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj\" (UID: \"e991533b-8af1-438b-99fb-31ed1b1db4d9\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" Jan 21 18:17:35 crc kubenswrapper[4792]: I0121 18:17:35.278229 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-442cm\" (UniqueName: \"kubernetes.io/projected/e991533b-8af1-438b-99fb-31ed1b1db4d9-kube-api-access-442cm\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj\" (UID: \"e991533b-8af1-438b-99fb-31ed1b1db4d9\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" Jan 21 18:17:35 crc kubenswrapper[4792]: I0121 18:17:35.278244 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/e991533b-8af1-438b-99fb-31ed1b1db4d9-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj\" (UID: \"e991533b-8af1-438b-99fb-31ed1b1db4d9\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" Jan 21 18:17:35 crc kubenswrapper[4792]: I0121 18:17:35.278353 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/e991533b-8af1-438b-99fb-31ed1b1db4d9-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj\" (UID: \"e991533b-8af1-438b-99fb-31ed1b1db4d9\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" Jan 21 18:17:35 crc kubenswrapper[4792]: I0121 18:17:35.279532 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/e991533b-8af1-438b-99fb-31ed1b1db4d9-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj\" (UID: \"e991533b-8af1-438b-99fb-31ed1b1db4d9\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" Jan 21 18:17:35 crc kubenswrapper[4792]: E0121 18:17:35.279676 4792 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-sens-meter-proxy-tls: secret "default-cloud1-sens-meter-proxy-tls" not found Jan 21 18:17:35 crc kubenswrapper[4792]: E0121 18:17:35.279751 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e991533b-8af1-438b-99fb-31ed1b1db4d9-default-cloud1-sens-meter-proxy-tls podName:e991533b-8af1-438b-99fb-31ed1b1db4d9 nodeName:}" failed. No retries permitted until 2026-01-21 18:17:35.779730101 +0000 UTC m=+1289.761693287 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "default-cloud1-sens-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/e991533b-8af1-438b-99fb-31ed1b1db4d9-default-cloud1-sens-meter-proxy-tls") pod "default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" (UID: "e991533b-8af1-438b-99fb-31ed1b1db4d9") : secret "default-cloud1-sens-meter-proxy-tls" not found
Jan 21 18:17:35 crc kubenswrapper[4792]: I0121 18:17:35.285080 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/e991533b-8af1-438b-99fb-31ed1b1db4d9-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj\" (UID: \"e991533b-8af1-438b-99fb-31ed1b1db4d9\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj"
Jan 21 18:17:35 crc kubenswrapper[4792]: I0121 18:17:35.301020 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-442cm\" (UniqueName: \"kubernetes.io/projected/e991533b-8af1-438b-99fb-31ed1b1db4d9-kube-api-access-442cm\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj\" (UID: \"e991533b-8af1-438b-99fb-31ed1b1db4d9\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj"
Jan 21 18:17:35 crc kubenswrapper[4792]: I0121 18:17:35.786439 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/e991533b-8af1-438b-99fb-31ed1b1db4d9-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj\" (UID: \"e991533b-8af1-438b-99fb-31ed1b1db4d9\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj"
Jan 21 18:17:35 crc kubenswrapper[4792]: E0121 18:17:35.786701 4792 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-sens-meter-proxy-tls: secret "default-cloud1-sens-meter-proxy-tls" not found
Jan 21 18:17:35 crc kubenswrapper[4792]: E0121 18:17:35.786780 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e991533b-8af1-438b-99fb-31ed1b1db4d9-default-cloud1-sens-meter-proxy-tls podName:e991533b-8af1-438b-99fb-31ed1b1db4d9 nodeName:}" failed. No retries permitted until 2026-01-21 18:17:36.786762768 +0000 UTC m=+1290.768725954 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "default-cloud1-sens-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/e991533b-8af1-438b-99fb-31ed1b1db4d9-default-cloud1-sens-meter-proxy-tls") pod "default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" (UID: "e991533b-8af1-438b-99fb-31ed1b1db4d9") : secret "default-cloud1-sens-meter-proxy-tls" not found
Jan 21 18:17:36 crc kubenswrapper[4792]: I0121 18:17:36.842990 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/e991533b-8af1-438b-99fb-31ed1b1db4d9-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj\" (UID: \"e991533b-8af1-438b-99fb-31ed1b1db4d9\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj"
Jan 21 18:17:36 crc kubenswrapper[4792]: I0121 18:17:36.854648 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/e991533b-8af1-438b-99fb-31ed1b1db4d9-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj\" (UID: \"e991533b-8af1-438b-99fb-31ed1b1db4d9\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj"
Jan 21 18:17:36 crc kubenswrapper[4792]: I0121 18:17:36.889398 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj"
Jan 21 18:17:41 crc kubenswrapper[4792]: I0121 18:17:41.763083 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj"]
Jan 21 18:17:42 crc kubenswrapper[4792]: I0121 18:17:42.227547 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"6056a47c-877d-43bb-847d-8b48c0156a6a","Type":"ContainerStarted","Data":"6398d6f9ef988be9df6b2a026c78c30935f6f80fd109ab82e282066ffee70fb5"}
Jan 21 18:17:42 crc kubenswrapper[4792]: I0121 18:17:42.229662 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"e8fa9d25-e034-4893-839b-70f33ad44b2e","Type":"ContainerStarted","Data":"39488708bd5dcb26916bb345f47cbf16ff1f0931012b1edc6197b2cabe343375"}
Jan 21 18:17:42 crc kubenswrapper[4792]: I0121 18:17:42.231267 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" event={"ID":"5ed633e5-26cf-469c-b1b2-250229e6d602","Type":"ContainerStarted","Data":"dee67739dd54498d40a82a7dd6052bbafea21807713f0cd613d2729c06e0ef82"}
Jan 21 18:17:42 crc kubenswrapper[4792]: I0121 18:17:42.245784 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" event={"ID":"3a86979f-5891-42c1-89ce-6b0aefd1af14","Type":"ContainerStarted","Data":"523d0428cf99bcac9b84e6bb4641b370a677b0b4a4497412b3fe67d5990b3adc"}
Jan 21 18:17:42 crc kubenswrapper[4792]: I0121 18:17:42.267672 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" event={"ID":"e991533b-8af1-438b-99fb-31ed1b1db4d9","Type":"ContainerStarted","Data":"5d41c30507f4af16cad6c286136fa4a8ee81f6dddfd56e3360408f6cb4657211"}
Jan 21 18:17:42 crc kubenswrapper[4792]: I0121 18:17:42.277824 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/prometheus-default-0" podStartSLOduration=4.778478529 podStartE2EDuration="59.277804362s" podCreationTimestamp="2026-01-21 18:16:43 +0000 UTC" firstStartedPulling="2026-01-21 18:16:46.772464596 +0000 UTC m=+1240.754427782" lastFinishedPulling="2026-01-21 18:17:41.271790429 +0000 UTC m=+1295.253753615" observedRunningTime="2026-01-21 18:17:42.265790612 +0000 UTC m=+1296.247753828" watchObservedRunningTime="2026-01-21 18:17:42.277804362 +0000 UTC m=+1296.259767548"
Jan 21 18:17:44 crc kubenswrapper[4792]: I0121 18:17:44.274977 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"e8fa9d25-e034-4893-839b-70f33ad44b2e","Type":"ContainerStarted","Data":"b5bab1b66a26f95654cf8cdfa6337c0d50117288bb2d57656f2f71cac3d7bc4b"}
Jan 21 18:17:45 crc kubenswrapper[4792]: I0121 18:17:45.292309 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"e8fa9d25-e034-4893-839b-70f33ad44b2e","Type":"ContainerStarted","Data":"16e16825dbfdd112e1884576373ecdf68d9f1742c33c65abbf276e2c44ee6b58"}
Jan 21 18:17:45 crc kubenswrapper[4792]: I0121 18:17:45.300998 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" event={"ID":"e991533b-8af1-438b-99fb-31ed1b1db4d9","Type":"ContainerStarted","Data":"40a18658fc251b000b9f02d08ba1f83175749ed9262ecc16c2b8c8c88af77bf2"}
Jan 21 18:17:45 crc kubenswrapper[4792]: I0121 18:17:45.351256 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/alertmanager-default-0" podStartSLOduration=26.807244233 podStartE2EDuration="41.351228734s" podCreationTimestamp="2026-01-21 18:17:04 +0000 UTC" firstStartedPulling="2026-01-21 18:17:29.93804073 +0000 UTC m=+1283.920003916" lastFinishedPulling="2026-01-21 18:17:44.482025231 +0000 UTC m=+1298.463988417" observedRunningTime="2026-01-21 18:17:45.329026703 +0000 UTC m=+1299.310989899" watchObservedRunningTime="2026-01-21 18:17:45.351228734 +0000 UTC m=+1299.333191920"
Jan 21 18:17:45 crc kubenswrapper[4792]: I0121 18:17:45.820389 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn"]
Jan 21 18:17:45 crc kubenswrapper[4792]: I0121 18:17:45.822139 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn"
Jan 21 18:17:45 crc kubenswrapper[4792]: I0121 18:17:45.824477 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-coll-event-sg-core-configmap"
Jan 21 18:17:45 crc kubenswrapper[4792]: I0121 18:17:45.824676 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-cert"
Jan 21 18:17:45 crc kubenswrapper[4792]: I0121 18:17:45.846165 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn"]
Jan 21 18:17:45 crc kubenswrapper[4792]: I0121 18:17:45.997281 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dg6jw\" (UniqueName: \"kubernetes.io/projected/c9b8bebe-7f04-4c45-9455-60313f44f51e-kube-api-access-dg6jw\") pod \"default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn\" (UID: \"c9b8bebe-7f04-4c45-9455-60313f44f51e\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn"
Jan 21 18:17:45 crc kubenswrapper[4792]: I0121 18:17:45.997407 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/c9b8bebe-7f04-4c45-9455-60313f44f51e-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn\" (UID: \"c9b8bebe-7f04-4c45-9455-60313f44f51e\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn"
Jan 21 18:17:45 crc kubenswrapper[4792]: I0121 18:17:45.997446 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/c9b8bebe-7f04-4c45-9455-60313f44f51e-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn\" (UID: \"c9b8bebe-7f04-4c45-9455-60313f44f51e\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn"
Jan 21 18:17:45 crc kubenswrapper[4792]: I0121 18:17:45.997558 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/c9b8bebe-7f04-4c45-9455-60313f44f51e-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn\" (UID: \"c9b8bebe-7f04-4c45-9455-60313f44f51e\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn"
Jan 21 18:17:46 crc kubenswrapper[4792]: I0121 18:17:46.099640 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/c9b8bebe-7f04-4c45-9455-60313f44f51e-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn\" (UID: \"c9b8bebe-7f04-4c45-9455-60313f44f51e\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn"
Jan 21 18:17:46 crc kubenswrapper[4792]: I0121 18:17:46.099752 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/c9b8bebe-7f04-4c45-9455-60313f44f51e-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn\" (UID: \"c9b8bebe-7f04-4c45-9455-60313f44f51e\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn"
Jan 21 18:17:46 crc kubenswrapper[4792]: I0121 18:17:46.099797 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dg6jw\" (UniqueName: \"kubernetes.io/projected/c9b8bebe-7f04-4c45-9455-60313f44f51e-kube-api-access-dg6jw\") pod \"default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn\" (UID: \"c9b8bebe-7f04-4c45-9455-60313f44f51e\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn"
Jan 21 18:17:46 crc kubenswrapper[4792]: I0121 18:17:46.099834 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/c9b8bebe-7f04-4c45-9455-60313f44f51e-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn\" (UID: \"c9b8bebe-7f04-4c45-9455-60313f44f51e\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn"
Jan 21 18:17:46 crc kubenswrapper[4792]: I0121 18:17:46.101015 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/c9b8bebe-7f04-4c45-9455-60313f44f51e-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn\" (UID: \"c9b8bebe-7f04-4c45-9455-60313f44f51e\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn"
Jan 21 18:17:46 crc kubenswrapper[4792]: I0121 18:17:46.101260 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/c9b8bebe-7f04-4c45-9455-60313f44f51e-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn\" (UID: \"c9b8bebe-7f04-4c45-9455-60313f44f51e\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn"
Jan 21 18:17:46 crc kubenswrapper[4792]: I0121 18:17:46.107917 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/c9b8bebe-7f04-4c45-9455-60313f44f51e-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn\" (UID: \"c9b8bebe-7f04-4c45-9455-60313f44f51e\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn"
Jan 21 18:17:46 crc kubenswrapper[4792]: I0121 18:17:46.121773 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dg6jw\" (UniqueName: \"kubernetes.io/projected/c9b8bebe-7f04-4c45-9455-60313f44f51e-kube-api-access-dg6jw\") pod \"default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn\" (UID: \"c9b8bebe-7f04-4c45-9455-60313f44f51e\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn"
Jan 21 18:17:46 crc kubenswrapper[4792]: I0121 18:17:46.152905 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn"
Jan 21 18:17:46 crc kubenswrapper[4792]: I0121 18:17:46.495987 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/prometheus-default-0"
Jan 21 18:17:46 crc kubenswrapper[4792]: I0121 18:17:46.499195 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/prometheus-default-0"
Jan 21 18:17:46 crc kubenswrapper[4792]: I0121 18:17:46.609522 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/prometheus-default-0"
Jan 21 18:17:46 crc kubenswrapper[4792]: I0121 18:17:46.735969 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn"]
Jan 21 18:17:46 crc kubenswrapper[4792]: W0121 18:17:46.748640 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc9b8bebe_7f04_4c45_9455_60313f44f51e.slice/crio-f6290ae2bfe258e358a86a26c791ff3e767484df6fcf651333c61c7e05800317 WatchSource:0}: Error finding container f6290ae2bfe258e358a86a26c791ff3e767484df6fcf651333c61c7e05800317: Status 404 returned error can't find the container with id f6290ae2bfe258e358a86a26c791ff3e767484df6fcf651333c61c7e05800317
Jan 21 18:17:47 crc kubenswrapper[4792]: I0121 18:17:47.189777 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694"]
Jan 21 18:17:47 crc kubenswrapper[4792]: I0121 18:17:47.193505 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694"
Jan 21 18:17:47 crc kubenswrapper[4792]: I0121 18:17:47.199557 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-ceil-event-sg-core-configmap"
Jan 21 18:17:47 crc kubenswrapper[4792]: I0121 18:17:47.206182 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694"]
Jan 21 18:17:47 crc kubenswrapper[4792]: I0121 18:17:47.274934 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/1007e823-8fe4-4892-9891-3a9ac8df0d23-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-f99f584f7-bt694\" (UID: \"1007e823-8fe4-4892-9891-3a9ac8df0d23\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694"
Jan 21 18:17:47 crc kubenswrapper[4792]: I0121 18:17:47.275367 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kd2rm\" (UniqueName: \"kubernetes.io/projected/1007e823-8fe4-4892-9891-3a9ac8df0d23-kube-api-access-kd2rm\") pod \"default-cloud1-ceil-event-smartgateway-f99f584f7-bt694\" (UID: \"1007e823-8fe4-4892-9891-3a9ac8df0d23\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694"
Jan 21 18:17:47 crc kubenswrapper[4792]: I0121 18:17:47.275925 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/1007e823-8fe4-4892-9891-3a9ac8df0d23-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-f99f584f7-bt694\" (UID: \"1007e823-8fe4-4892-9891-3a9ac8df0d23\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694"
Jan 21 18:17:47 crc kubenswrapper[4792]: I0121 18:17:47.276438 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/1007e823-8fe4-4892-9891-3a9ac8df0d23-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-f99f584f7-bt694\" (UID: \"1007e823-8fe4-4892-9891-3a9ac8df0d23\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694"
Jan 21 18:17:47 crc kubenswrapper[4792]: I0121 18:17:47.323943 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn" event={"ID":"c9b8bebe-7f04-4c45-9455-60313f44f51e","Type":"ContainerStarted","Data":"f6290ae2bfe258e358a86a26c791ff3e767484df6fcf651333c61c7e05800317"}
Jan 21 18:17:47 crc kubenswrapper[4792]: I0121 18:17:47.377957 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/1007e823-8fe4-4892-9891-3a9ac8df0d23-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-f99f584f7-bt694\" (UID: \"1007e823-8fe4-4892-9891-3a9ac8df0d23\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694"
Jan 21 18:17:47 crc kubenswrapper[4792]: I0121 18:17:47.378050 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/1007e823-8fe4-4892-9891-3a9ac8df0d23-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-f99f584f7-bt694\" (UID: \"1007e823-8fe4-4892-9891-3a9ac8df0d23\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694"
Jan 21 18:17:47 crc kubenswrapper[4792]: I0121 18:17:47.378107 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kd2rm\" (UniqueName: \"kubernetes.io/projected/1007e823-8fe4-4892-9891-3a9ac8df0d23-kube-api-access-kd2rm\") pod \"default-cloud1-ceil-event-smartgateway-f99f584f7-bt694\" (UID: \"1007e823-8fe4-4892-9891-3a9ac8df0d23\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694"
Jan 21 18:17:47 crc kubenswrapper[4792]: I0121 18:17:47.378171 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/1007e823-8fe4-4892-9891-3a9ac8df0d23-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-f99f584f7-bt694\" (UID: \"1007e823-8fe4-4892-9891-3a9ac8df0d23\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694"
Jan 21 18:17:47 crc kubenswrapper[4792]: I0121 18:17:47.379330 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/1007e823-8fe4-4892-9891-3a9ac8df0d23-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-f99f584f7-bt694\" (UID: \"1007e823-8fe4-4892-9891-3a9ac8df0d23\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694"
Jan 21 18:17:47 crc kubenswrapper[4792]: I0121 18:17:47.380479 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/1007e823-8fe4-4892-9891-3a9ac8df0d23-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-f99f584f7-bt694\" (UID: \"1007e823-8fe4-4892-9891-3a9ac8df0d23\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694"
Jan 21 18:17:47 crc kubenswrapper[4792]: I0121 18:17:47.383920 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/prometheus-default-0"
Jan 21 18:17:47 crc kubenswrapper[4792]: I0121 18:17:47.386123 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/1007e823-8fe4-4892-9891-3a9ac8df0d23-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-f99f584f7-bt694\" (UID: \"1007e823-8fe4-4892-9891-3a9ac8df0d23\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694"
Jan 21 18:17:47 crc kubenswrapper[4792]: I0121 18:17:47.400604 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kd2rm\" (UniqueName: \"kubernetes.io/projected/1007e823-8fe4-4892-9891-3a9ac8df0d23-kube-api-access-kd2rm\") pod \"default-cloud1-ceil-event-smartgateway-f99f584f7-bt694\" (UID: \"1007e823-8fe4-4892-9891-3a9ac8df0d23\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694"
Jan 21 18:17:47 crc kubenswrapper[4792]: I0121 18:17:47.523183 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694"
Jan 21 18:17:51 crc kubenswrapper[4792]: I0121 18:17:51.535340 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694"]
Jan 21 18:17:52 crc kubenswrapper[4792]: I0121 18:17:52.379162 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" event={"ID":"5ed633e5-26cf-469c-b1b2-250229e6d602","Type":"ContainerStarted","Data":"98b25a80124c21f167806e9946495d135b93f21ae45964ca9f5892058516ff02"}
Jan 21 18:17:52 crc kubenswrapper[4792]: I0121 18:17:52.381723 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694" event={"ID":"1007e823-8fe4-4892-9891-3a9ac8df0d23","Type":"ContainerStarted","Data":"af0ad3a3f071bdf58e4fca71904fc2ba8f13ffbfffc48f4776179e1ba0a5d389"}
Jan 21 18:17:52 crc kubenswrapper[4792]: I0121 18:17:52.381754 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694" event={"ID":"1007e823-8fe4-4892-9891-3a9ac8df0d23","Type":"ContainerStarted","Data":"e7025b192daddadc6d0644d38baa0fda358266d5447db2ba62c284dc4dfcfb05"}
Jan 21 18:17:52 crc kubenswrapper[4792]: I0121 18:17:52.385283 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" event={"ID":"e991533b-8af1-438b-99fb-31ed1b1db4d9","Type":"ContainerStarted","Data":"6109e06c230e7a62c4362813822645a01ce87b2ee98c74d4b39419907cae633c"}
Jan 21 18:17:52 crc kubenswrapper[4792]: I0121 18:17:52.387680 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" event={"ID":"3a86979f-5891-42c1-89ce-6b0aefd1af14","Type":"ContainerStarted","Data":"822fb7a0af06d21d964303b00c4ae1f472df3639ec2da0ff50e5c198c87d266b"}
Jan 21 18:17:52 crc kubenswrapper[4792]: I0121 18:17:52.389505 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn" event={"ID":"c9b8bebe-7f04-4c45-9455-60313f44f51e","Type":"ContainerStarted","Data":"d9996ec3ea8d694a7f288dde7c8fab2db06e2e7555ed0233475379664ea6285d"}
Jan 21 18:17:59 crc kubenswrapper[4792]: I0121 18:17:59.006906 4792 fsHandler.go:133] fs: disk usage and inodes count on following dirs took 2.027352138s: [/var/lib/containers/storage/overlay/efc24c6f470d5d8d4b04da3284fb5146e32c573bf850efde21d9a6fefd4eb38b/diff /var/log/pods/service-telemetry_service-telemetry-operator-55b89ddfb9-5bwxr_3132bdf7-accb-4e05-833d-7765614688ea/operator/0.log]; will not log again for this container unless duration exceeds 2s
Jan 21 18:18:07 crc kubenswrapper[4792]: I0121 18:18:07.373924 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-89qv7"]
Jan 21 18:18:07 crc kubenswrapper[4792]: I0121 18:18:07.374799 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/default-interconnect-68864d46cb-89qv7" podUID="434bbf11-14ad-4d1d-91ba-f389391345ed" containerName="default-interconnect" containerID="cri-o://2ab42e4ac198a0139a95a8cacf10aa0830d029cb65fb951c69781ada5b9ea992" gracePeriod=30
Jan 21 18:18:08 crc kubenswrapper[4792]: I0121 18:18:08.540023 4792 generic.go:334] "Generic (PLEG): container finished" podID="1007e823-8fe4-4892-9891-3a9ac8df0d23" containerID="af0ad3a3f071bdf58e4fca71904fc2ba8f13ffbfffc48f4776179e1ba0a5d389" exitCode=0
Jan 21 18:18:08 crc kubenswrapper[4792]: I0121 18:18:08.540128 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694" event={"ID":"1007e823-8fe4-4892-9891-3a9ac8df0d23","Type":"ContainerDied","Data":"af0ad3a3f071bdf58e4fca71904fc2ba8f13ffbfffc48f4776179e1ba0a5d389"}
Jan 21 18:18:08 crc kubenswrapper[4792]: I0121 18:18:08.542438 4792 generic.go:334] "Generic (PLEG): container finished" podID="434bbf11-14ad-4d1d-91ba-f389391345ed" containerID="2ab42e4ac198a0139a95a8cacf10aa0830d029cb65fb951c69781ada5b9ea992" exitCode=0
Jan 21 18:18:08 crc kubenswrapper[4792]: I0121 18:18:08.542458 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-89qv7" event={"ID":"434bbf11-14ad-4d1d-91ba-f389391345ed","Type":"ContainerDied","Data":"2ab42e4ac198a0139a95a8cacf10aa0830d029cb65fb951c69781ada5b9ea992"}
Jan 21 18:18:09 crc kubenswrapper[4792]: I0121 18:18:09.553102 4792 generic.go:334] "Generic (PLEG): container finished" podID="3a86979f-5891-42c1-89ce-6b0aefd1af14" containerID="822fb7a0af06d21d964303b00c4ae1f472df3639ec2da0ff50e5c198c87d266b" exitCode=0
Jan 21 18:18:09 crc kubenswrapper[4792]: I0121 18:18:09.553187 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" event={"ID":"3a86979f-5891-42c1-89ce-6b0aefd1af14","Type":"ContainerDied","Data":"822fb7a0af06d21d964303b00c4ae1f472df3639ec2da0ff50e5c198c87d266b"}
Jan 21 18:18:09 crc kubenswrapper[4792]: I0121 18:18:09.555392 4792 generic.go:334] "Generic (PLEG): container finished" podID="e991533b-8af1-438b-99fb-31ed1b1db4d9" containerID="6109e06c230e7a62c4362813822645a01ce87b2ee98c74d4b39419907cae633c" exitCode=0
Jan 21 18:18:09 crc kubenswrapper[4792]: I0121 18:18:09.555486 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" event={"ID":"e991533b-8af1-438b-99fb-31ed1b1db4d9","Type":"ContainerDied","Data":"6109e06c230e7a62c4362813822645a01ce87b2ee98c74d4b39419907cae633c"}
Jan 21 18:18:09 crc kubenswrapper[4792]: I0121 18:18:09.557523 4792 generic.go:334] "Generic (PLEG): container finished" podID="c9b8bebe-7f04-4c45-9455-60313f44f51e" containerID="d9996ec3ea8d694a7f288dde7c8fab2db06e2e7555ed0233475379664ea6285d" exitCode=0
Jan 21 18:18:09 crc kubenswrapper[4792]: I0121 18:18:09.557606 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn" event={"ID":"c9b8bebe-7f04-4c45-9455-60313f44f51e","Type":"ContainerDied","Data":"d9996ec3ea8d694a7f288dde7c8fab2db06e2e7555ed0233475379664ea6285d"}
Jan 21 18:18:09 crc kubenswrapper[4792]: I0121 18:18:09.560236 4792 generic.go:334] "Generic (PLEG): container finished" podID="5ed633e5-26cf-469c-b1b2-250229e6d602" containerID="98b25a80124c21f167806e9946495d135b93f21ae45964ca9f5892058516ff02" exitCode=0
Jan 21 18:18:09 crc kubenswrapper[4792]: I0121 18:18:09.560269 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" event={"ID":"5ed633e5-26cf-469c-b1b2-250229e6d602","Type":"ContainerDied","Data":"98b25a80124c21f167806e9946495d135b93f21ae45964ca9f5892058516ff02"}
Jan 21 18:18:09 crc kubenswrapper[4792]: E0121 18:18:09.932416 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/infrawatch/sg-core:latest"
Jan 21 18:18:09 crc kubenswrapper[4792]: E0121 18:18:09.932630 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:sg-core,Image:quay.io/infrawatch/sg-core:latest,Command:[],Args:[-config /etc/sg-core/sg-core.conf.yaml],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:MY_POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:socket-dir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:sg-core-config,ReadOnly:true,MountPath:/etc/sg-core/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-certs,ReadOnly:false,MountPath:/config/certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dg6jw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn_service-telemetry(c9b8bebe-7f04-4c45-9455-60313f44f51e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 21 18:18:09 crc kubenswrapper[4792]: E0121 18:18:09.935517 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn" podUID="c9b8bebe-7f04-4c45-9455-60313f44f51e"
Jan 21 18:18:09 crc kubenswrapper[4792]: E0121 18:18:09.968561 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/infrawatch/sg-core:latest"
Jan 21 18:18:09 crc kubenswrapper[4792]: E0121 18:18:09.968746 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:sg-core,Image:quay.io/infrawatch/sg-core:latest,Command:[],Args:[-config /etc/sg-core/sg-core.conf.yaml],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:prom-https,HostPort:0,ContainerPort:8083,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:MY_POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:socket-dir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:sg-core-config,ReadOnly:true,MountPath:/etc/sg-core/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-442cm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj_service-telemetry(e991533b-8af1-438b-99fb-31ed1b1db4d9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 21 18:18:09 crc kubenswrapper[4792]: E0121 18:18:09.970118 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" podUID="e991533b-8af1-438b-99fb-31ed1b1db4d9"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.201343 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-89qv7"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.240713 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-ncxjb"]
Jan 21 18:18:10 crc kubenswrapper[4792]: E0121 18:18:10.241173 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="434bbf11-14ad-4d1d-91ba-f389391345ed" containerName="default-interconnect"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.241191 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="434bbf11-14ad-4d1d-91ba-f389391345ed" containerName="default-interconnect"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.241391 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="434bbf11-14ad-4d1d-91ba-f389391345ed" containerName="default-interconnect"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.241997 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-ncxjb"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.269132 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-ncxjb"]
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.317374 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-sasl-users\") pod \"434bbf11-14ad-4d1d-91ba-f389391345ed\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") "
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.317491 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/434bbf11-14ad-4d1d-91ba-f389391345ed-sasl-config\") pod \"434bbf11-14ad-4d1d-91ba-f389391345ed\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") "
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.317547 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-inter-router-ca\") pod \"434bbf11-14ad-4d1d-91ba-f389391345ed\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") "
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.317750 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfzvv\" (UniqueName: \"kubernetes.io/projected/434bbf11-14ad-4d1d-91ba-f389391345ed-kube-api-access-kfzvv\") pod \"434bbf11-14ad-4d1d-91ba-f389391345ed\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") "
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.317828 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-openstack-credentials\") pod \"434bbf11-14ad-4d1d-91ba-f389391345ed\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") "
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.317920 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-inter-router-credentials\") pod \"434bbf11-14ad-4d1d-91ba-f389391345ed\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") "
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.317952 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-openstack-ca\") pod \"434bbf11-14ad-4d1d-91ba-f389391345ed\" (UID: \"434bbf11-14ad-4d1d-91ba-f389391345ed\") "
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.318228 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bf6r2\" (UniqueName: \"kubernetes.io/projected/806a485e-8ece-4663-b323-8f01b038d9aa-kube-api-access-bf6r2\") pod \"default-interconnect-68864d46cb-ncxjb\" (UID: \"806a485e-8ece-4663-b323-8f01b038d9aa\") " pod="service-telemetry/default-interconnect-68864d46cb-ncxjb"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.318350 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/434bbf11-14ad-4d1d-91ba-f389391345ed-sasl-config" (OuterVolumeSpecName: "sasl-config") pod "434bbf11-14ad-4d1d-91ba-f389391345ed" (UID: "434bbf11-14ad-4d1d-91ba-f389391345ed"). InnerVolumeSpecName "sasl-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.318566 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/806a485e-8ece-4663-b323-8f01b038d9aa-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-ncxjb\" (UID: \"806a485e-8ece-4663-b323-8f01b038d9aa\") " pod="service-telemetry/default-interconnect-68864d46cb-ncxjb"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.318794 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/806a485e-8ece-4663-b323-8f01b038d9aa-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-ncxjb\" (UID: \"806a485e-8ece-4663-b323-8f01b038d9aa\") " pod="service-telemetry/default-interconnect-68864d46cb-ncxjb"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.319051 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/806a485e-8ece-4663-b323-8f01b038d9aa-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-ncxjb\" (UID: \"806a485e-8ece-4663-b323-8f01b038d9aa\") " pod="service-telemetry/default-interconnect-68864d46cb-ncxjb"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.319214 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/806a485e-8ece-4663-b323-8f01b038d9aa-sasl-config\") pod \"default-interconnect-68864d46cb-ncxjb\" (UID: \"806a485e-8ece-4663-b323-8f01b038d9aa\") " pod="service-telemetry/default-interconnect-68864d46cb-ncxjb"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.319264 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/806a485e-8ece-4663-b323-8f01b038d9aa-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-ncxjb\" (UID: \"806a485e-8ece-4663-b323-8f01b038d9aa\") " pod="service-telemetry/default-interconnect-68864d46cb-ncxjb"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.319344 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/806a485e-8ece-4663-b323-8f01b038d9aa-sasl-users\") pod \"default-interconnect-68864d46cb-ncxjb\" (UID: \"806a485e-8ece-4663-b323-8f01b038d9aa\") " pod="service-telemetry/default-interconnect-68864d46cb-ncxjb"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.319547 4792 reconciler_common.go:293] "Volume detached for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/434bbf11-14ad-4d1d-91ba-f389391345ed-sasl-config\") on node \"crc\" DevicePath \"\""
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.324379 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-openstack-credentials" (OuterVolumeSpecName: "default-interconnect-openstack-credentials") pod "434bbf11-14ad-4d1d-91ba-f389391345ed" (UID: "434bbf11-14ad-4d1d-91ba-f389391345ed"). InnerVolumeSpecName "default-interconnect-openstack-credentials". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.324787 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-openstack-ca" (OuterVolumeSpecName: "default-interconnect-openstack-ca") pod "434bbf11-14ad-4d1d-91ba-f389391345ed" (UID: "434bbf11-14ad-4d1d-91ba-f389391345ed"). InnerVolumeSpecName "default-interconnect-openstack-ca". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.324932 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-inter-router-ca" (OuterVolumeSpecName: "default-interconnect-inter-router-ca") pod "434bbf11-14ad-4d1d-91ba-f389391345ed" (UID: "434bbf11-14ad-4d1d-91ba-f389391345ed"). InnerVolumeSpecName "default-interconnect-inter-router-ca". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.325894 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-sasl-users" (OuterVolumeSpecName: "sasl-users") pod "434bbf11-14ad-4d1d-91ba-f389391345ed" (UID: "434bbf11-14ad-4d1d-91ba-f389391345ed"). InnerVolumeSpecName "sasl-users". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.326062 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-inter-router-credentials" (OuterVolumeSpecName: "default-interconnect-inter-router-credentials") pod "434bbf11-14ad-4d1d-91ba-f389391345ed" (UID: "434bbf11-14ad-4d1d-91ba-f389391345ed"). InnerVolumeSpecName "default-interconnect-inter-router-credentials". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.327199 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/434bbf11-14ad-4d1d-91ba-f389391345ed-kube-api-access-kfzvv" (OuterVolumeSpecName: "kube-api-access-kfzvv") pod "434bbf11-14ad-4d1d-91ba-f389391345ed" (UID: "434bbf11-14ad-4d1d-91ba-f389391345ed"). InnerVolumeSpecName "kube-api-access-kfzvv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.421322 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/806a485e-8ece-4663-b323-8f01b038d9aa-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-ncxjb\" (UID: \"806a485e-8ece-4663-b323-8f01b038d9aa\") " pod="service-telemetry/default-interconnect-68864d46cb-ncxjb"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.421416 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/806a485e-8ece-4663-b323-8f01b038d9aa-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-ncxjb\" (UID: \"806a485e-8ece-4663-b323-8f01b038d9aa\") " pod="service-telemetry/default-interconnect-68864d46cb-ncxjb"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.421466 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/806a485e-8ece-4663-b323-8f01b038d9aa-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-ncxjb\" (UID: \"806a485e-8ece-4663-b323-8f01b038d9aa\") " pod="service-telemetry/default-interconnect-68864d46cb-ncxjb"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.421494 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/806a485e-8ece-4663-b323-8f01b038d9aa-sasl-config\") pod \"default-interconnect-68864d46cb-ncxjb\" (UID: \"806a485e-8ece-4663-b323-8f01b038d9aa\") " pod="service-telemetry/default-interconnect-68864d46cb-ncxjb"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.421524 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/806a485e-8ece-4663-b323-8f01b038d9aa-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-ncxjb\" (UID: \"806a485e-8ece-4663-b323-8f01b038d9aa\") " pod="service-telemetry/default-interconnect-68864d46cb-ncxjb"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.421560 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/806a485e-8ece-4663-b323-8f01b038d9aa-sasl-users\") pod \"default-interconnect-68864d46cb-ncxjb\" (UID: \"806a485e-8ece-4663-b323-8f01b038d9aa\") " pod="service-telemetry/default-interconnect-68864d46cb-ncxjb"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.421592 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bf6r2\" (UniqueName: \"kubernetes.io/projected/806a485e-8ece-4663-b323-8f01b038d9aa-kube-api-access-bf6r2\") pod \"default-interconnect-68864d46cb-ncxjb\" (UID: \"806a485e-8ece-4663-b323-8f01b038d9aa\") " pod="service-telemetry/default-interconnect-68864d46cb-ncxjb"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.421662 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfzvv\" (UniqueName: \"kubernetes.io/projected/434bbf11-14ad-4d1d-91ba-f389391345ed-kube-api-access-kfzvv\") on node \"crc\" DevicePath \"\""
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.421675 4792 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-openstack-credentials\") on node \"crc\" DevicePath \"\""
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.421687 4792 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-inter-router-credentials\") on node \"crc\" DevicePath \"\""
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.421705 4792 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-openstack-ca\") on node \"crc\" DevicePath \"\""
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.421718 4792 reconciler_common.go:293] "Volume detached for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-sasl-users\") on node \"crc\" DevicePath \"\""
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.421730 4792 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/434bbf11-14ad-4d1d-91ba-f389391345ed-default-interconnect-inter-router-ca\") on node \"crc\" DevicePath \"\""
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.423395 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/806a485e-8ece-4663-b323-8f01b038d9aa-sasl-config\") pod \"default-interconnect-68864d46cb-ncxjb\" (UID: \"806a485e-8ece-4663-b323-8f01b038d9aa\") " pod="service-telemetry/default-interconnect-68864d46cb-ncxjb"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.425243 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/806a485e-8ece-4663-b323-8f01b038d9aa-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-ncxjb\" (UID: \"806a485e-8ece-4663-b323-8f01b038d9aa\") " pod="service-telemetry/default-interconnect-68864d46cb-ncxjb"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.426475 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/806a485e-8ece-4663-b323-8f01b038d9aa-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-ncxjb\" (UID: \"806a485e-8ece-4663-b323-8f01b038d9aa\") " pod="service-telemetry/default-interconnect-68864d46cb-ncxjb"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.426609 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/806a485e-8ece-4663-b323-8f01b038d9aa-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-ncxjb\" (UID: \"806a485e-8ece-4663-b323-8f01b038d9aa\") " pod="service-telemetry/default-interconnect-68864d46cb-ncxjb"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.426775 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/806a485e-8ece-4663-b323-8f01b038d9aa-sasl-users\") pod \"default-interconnect-68864d46cb-ncxjb\" (UID: \"806a485e-8ece-4663-b323-8f01b038d9aa\") " pod="service-telemetry/default-interconnect-68864d46cb-ncxjb"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.427043 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/806a485e-8ece-4663-b323-8f01b038d9aa-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-ncxjb\" (UID: \"806a485e-8ece-4663-b323-8f01b038d9aa\") " pod="service-telemetry/default-interconnect-68864d46cb-ncxjb"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.439544 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bf6r2\" (UniqueName: \"kubernetes.io/projected/806a485e-8ece-4663-b323-8f01b038d9aa-kube-api-access-bf6r2\") pod \"default-interconnect-68864d46cb-ncxjb\" (UID: \"806a485e-8ece-4663-b323-8f01b038d9aa\") " pod="service-telemetry/default-interconnect-68864d46cb-ncxjb"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.571190 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-ncxjb"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.575959 4792 scope.go:117] "RemoveContainer" containerID="6109e06c230e7a62c4362813822645a01ce87b2ee98c74d4b39419907cae633c"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.576764 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-89qv7"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.577413 4792 scope.go:117] "RemoveContainer" containerID="d9996ec3ea8d694a7f288dde7c8fab2db06e2e7555ed0233475379664ea6285d"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.577556 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-89qv7" event={"ID":"434bbf11-14ad-4d1d-91ba-f389391345ed","Type":"ContainerDied","Data":"c3b61fcae33c23d2121348bd35f3f24315ae757d6e3dadf10e0e0eefe10553cd"}
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.577683 4792 scope.go:117] "RemoveContainer" containerID="2ab42e4ac198a0139a95a8cacf10aa0830d029cb65fb951c69781ada5b9ea992"
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.748894 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-89qv7"]
Jan 21 18:18:10 crc kubenswrapper[4792]: I0121 18:18:10.755088 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-89qv7"]
Jan 21 18:18:11 crc kubenswrapper[4792]: I0121 18:18:11.406211 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-ncxjb"]
Jan 21 18:18:11 crc kubenswrapper[4792]: W0121 18:18:11.692450 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod806a485e_8ece_4663_b323_8f01b038d9aa.slice/crio-c2ff3dd07d504891a66256c96e0b5fa297b2f9bfced4d9640fbe6e62f181b746 WatchSource:0}: Error finding container c2ff3dd07d504891a66256c96e0b5fa297b2f9bfced4d9640fbe6e62f181b746: Status 404 returned error can't find the container with id c2ff3dd07d504891a66256c96e0b5fa297b2f9bfced4d9640fbe6e62f181b746
Jan 21 18:18:11 crc kubenswrapper[4792]: E0121 18:18:11.706368 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/infrawatch/sg-core:latest"
Jan 21 18:18:11 crc kubenswrapper[4792]: E0121 18:18:11.706592 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:sg-core,Image:quay.io/infrawatch/sg-core:latest,Command:[],Args:[-config /etc/sg-core/sg-core.conf.yaml],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:prom-https,HostPort:0,ContainerPort:8083,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:MY_POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:socket-dir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:sg-core-config,ReadOnly:true,MountPath:/etc/sg-core/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tttgp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8_service-telemetry(5ed633e5-26cf-469c-b1b2-250229e6d602): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 21 18:18:11 crc kubenswrapper[4792]: E0121 18:18:11.707842 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" podUID="5ed633e5-26cf-469c-b1b2-250229e6d602"
Jan 21 18:18:12 crc kubenswrapper[4792]: I0121 18:18:12.329926 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="434bbf11-14ad-4d1d-91ba-f389391345ed" path="/var/lib/kubelet/pods/434bbf11-14ad-4d1d-91ba-f389391345ed/volumes"
Jan 21 18:18:12 crc kubenswrapper[4792]: E0121 18:18:12.474661 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn" podUID="c9b8bebe-7f04-4c45-9455-60313f44f51e"
Jan 21 18:18:12 crc kubenswrapper[4792]: I0121 18:18:12.602928 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn" event={"ID":"c9b8bebe-7f04-4c45-9455-60313f44f51e","Type":"ContainerStarted","Data":"a72b21d991cf7daf9d3041f1637ef1f453de93131f5d92be5994072b29dfcedd"}
Jan 21 18:18:12 crc kubenswrapper[4792]: I0121 18:18:12.606325 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-ncxjb" event={"ID":"806a485e-8ece-4663-b323-8f01b038d9aa","Type":"ContainerStarted","Data":"fd6ea181d20d98af8b6ca9e77466370cae68e535ce5ea28e35c7fc1024f2d550"}
Jan 21 18:18:12 crc kubenswrapper[4792]: I0121 18:18:12.606387 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-ncxjb" event={"ID":"806a485e-8ece-4663-b323-8f01b038d9aa","Type":"ContainerStarted","Data":"c2ff3dd07d504891a66256c96e0b5fa297b2f9bfced4d9640fbe6e62f181b746"}
Jan 21 18:18:12 crc kubenswrapper[4792]: E0121 18:18:12.609013 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn" podUID="c9b8bebe-7f04-4c45-9455-60313f44f51e"
Jan 21 18:18:12 crc kubenswrapper[4792]: I0121 18:18:12.609662 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694" event={"ID":"1007e823-8fe4-4892-9891-3a9ac8df0d23","Type":"ContainerStarted","Data":"27afa61c84e25bf12b629bb9742147a1b2cd76afa6a0d0215a8d69de22b21095"}
Jan 21 18:18:12 crc kubenswrapper[4792]: I0121 18:18:12.609990 4792 scope.go:117] "RemoveContainer" containerID="98b25a80124c21f167806e9946495d135b93f21ae45964ca9f5892058516ff02"
Jan 21 18:18:12 crc kubenswrapper[4792]: I0121 18:18:12.610972 4792 scope.go:117] "RemoveContainer" containerID="af0ad3a3f071bdf58e4fca71904fc2ba8f13ffbfffc48f4776179e1ba0a5d389"
Jan 21 18:18:12 crc kubenswrapper[4792]: I0121 18:18:12.669934 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-interconnect-68864d46cb-ncxjb" podStartSLOduration=5.669895184 podStartE2EDuration="5.669895184s" podCreationTimestamp="2026-01-21 18:18:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:18:12.651875909 +0000 UTC m=+1326.633839105" watchObservedRunningTime="2026-01-21 18:18:12.669895184 +0000 UTC m=+1326.651858370"
Jan 21 18:18:12 crc kubenswrapper[4792]: E0121 18:18:12.696182 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" podUID="e991533b-8af1-438b-99fb-31ed1b1db4d9"
Jan 21 18:18:13 crc kubenswrapper[4792]: E0121 18:18:13.318659 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" podUID="5ed633e5-26cf-469c-b1b2-250229e6d602"
Jan 21 18:18:13 crc kubenswrapper[4792]: I0121 18:18:13.633080 4792 generic.go:334] "Generic (PLEG): container finished" podID="c9b8bebe-7f04-4c45-9455-60313f44f51e" containerID="a72b21d991cf7daf9d3041f1637ef1f453de93131f5d92be5994072b29dfcedd" exitCode=0
Jan 21 18:18:13 crc kubenswrapper[4792]: I0121 18:18:13.633451 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn" event={"ID":"c9b8bebe-7f04-4c45-9455-60313f44f51e","Type":"ContainerDied","Data":"a72b21d991cf7daf9d3041f1637ef1f453de93131f5d92be5994072b29dfcedd"}
Jan 21 18:18:13 crc kubenswrapper[4792]: I0121 18:18:13.633550 4792 scope.go:117] "RemoveContainer" containerID="d9996ec3ea8d694a7f288dde7c8fab2db06e2e7555ed0233475379664ea6285d"
Jan 21 18:18:13 crc kubenswrapper[4792]: I0121 18:18:13.634768 4792 scope.go:117] "RemoveContainer" containerID="a72b21d991cf7daf9d3041f1637ef1f453de93131f5d92be5994072b29dfcedd"
Jan 21 18:18:13 crc kubenswrapper[4792]: I0121 18:18:13.640366 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" event={"ID":"5ed633e5-26cf-469c-b1b2-250229e6d602","Type":"ContainerStarted","Data":"60c4edbe0deb92db9952211e6b8e987c03df1ef994761031be168594771af7c0"}
Jan 21 18:18:13 crc kubenswrapper[4792]: E0121 18:18:13.641308 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn_service-telemetry(c9b8bebe-7f04-4c45-9455-60313f44f51e)\", failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/sg-core:latest\\\"\"]" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn" podUID="c9b8bebe-7f04-4c45-9455-60313f44f51e"
Jan 21 18:18:13 crc kubenswrapper[4792]: E0121 18:18:13.644572 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" podUID="5ed633e5-26cf-469c-b1b2-250229e6d602"
Jan 21 18:18:13 crc kubenswrapper[4792]: I0121 18:18:13.646833 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694" event={"ID":"1007e823-8fe4-4892-9891-3a9ac8df0d23","Type":"ContainerStarted","Data":"8517452fa84d253eaedb00684e62e7934c9ced6b872fd1f4324222194b265d3e"}
Jan 21 18:18:13 crc kubenswrapper[4792]: I0121 18:18:13.657577 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" event={"ID":"3a86979f-5891-42c1-89ce-6b0aefd1af14","Type":"ContainerStarted","Data":"4732e376ff5007b2c85021d021a79b9d6f05bee10440615702ec7b4f6c8c5209"}
Jan 21 18:18:13 crc kubenswrapper[4792]: I0121 18:18:13.661028 4792 scope.go:117] "RemoveContainer" containerID="822fb7a0af06d21d964303b00c4ae1f472df3639ec2da0ff50e5c198c87d266b"
Jan 21 18:18:13 crc kubenswrapper[4792]: I0121 18:18:13.665825 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" event={"ID":"e991533b-8af1-438b-99fb-31ed1b1db4d9","Type":"ContainerStarted","Data":"2026d59744b4919d088c7bae833bfbe10fc7cdcda5089f2e7bfa9534db4a6aca"}
Jan 21 18:18:13 crc kubenswrapper[4792]: E0121 18:18:13.670457 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" podUID="e991533b-8af1-438b-99fb-31ed1b1db4d9"
Jan 21 18:18:13 crc kubenswrapper[4792]: I0121 18:18:13.715294 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694" podStartSLOduration=5.139760516 podStartE2EDuration="26.71527429s" podCreationTimestamp="2026-01-21 18:17:47 +0000 UTC" firstStartedPulling="2026-01-21 18:17:51.54657214 +0000 UTC m=+1305.528535326" lastFinishedPulling="2026-01-21 18:18:13.122085904 +0000 UTC m=+1327.104049100" observedRunningTime="2026-01-21 18:18:13.695178518 +0000 UTC m=+1327.677141704" watchObservedRunningTime="2026-01-21 18:18:13.71527429 +0000 UTC m=+1327.697237476"
Jan 21 18:18:14 crc kubenswrapper[4792]: I0121 18:18:14.679918 4792 generic.go:334] "Generic (PLEG): container finished" podID="1007e823-8fe4-4892-9891-3a9ac8df0d23" containerID="8517452fa84d253eaedb00684e62e7934c9ced6b872fd1f4324222194b265d3e" exitCode=0
Jan 21 18:18:14 crc kubenswrapper[4792]: I0121 18:18:14.680312 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694" event={"ID":"1007e823-8fe4-4892-9891-3a9ac8df0d23","Type":"ContainerDied","Data":"8517452fa84d253eaedb00684e62e7934c9ced6b872fd1f4324222194b265d3e"}
Jan 21 18:18:14 crc kubenswrapper[4792]: I0121 18:18:14.680357 4792 scope.go:117] "RemoveContainer" containerID="af0ad3a3f071bdf58e4fca71904fc2ba8f13ffbfffc48f4776179e1ba0a5d389"
Jan 21 18:18:14 crc kubenswrapper[4792]: I0121 18:18:14.680768 4792 scope.go:117] "RemoveContainer" containerID="8517452fa84d253eaedb00684e62e7934c9ced6b872fd1f4324222194b265d3e"
Jan 21 18:18:14 crc kubenswrapper[4792]: E0121 18:18:14.681124 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-ceil-event-smartgateway-f99f584f7-bt694_service-telemetry(1007e823-8fe4-4892-9891-3a9ac8df0d23)\"" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694" podUID="1007e823-8fe4-4892-9891-3a9ac8df0d23"
Jan 21 18:18:14 crc kubenswrapper[4792]: I0121 18:18:14.685304 4792 generic.go:334] "Generic (PLEG): container finished" podID="5ed633e5-26cf-469c-b1b2-250229e6d602" containerID="60c4edbe0deb92db9952211e6b8e987c03df1ef994761031be168594771af7c0" exitCode=0
Jan 21 18:18:14 crc kubenswrapper[4792]: I0121 18:18:14.685403 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" event={"ID":"5ed633e5-26cf-469c-b1b2-250229e6d602","Type":"ContainerDied","Data":"60c4edbe0deb92db9952211e6b8e987c03df1ef994761031be168594771af7c0"}
Jan 21 18:18:14 crc kubenswrapper[4792]: I0121 18:18:14.686208 4792 scope.go:117] "RemoveContainer" containerID="60c4edbe0deb92db9952211e6b8e987c03df1ef994761031be168594771af7c0"
Jan 21 18:18:14 crc kubenswrapper[4792]: E0121 18:18:14.689765 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8_service-telemetry(5ed633e5-26cf-469c-b1b2-250229e6d602)\", failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/sg-core:latest\\\"\"]" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" podUID="5ed633e5-26cf-469c-b1b2-250229e6d602"
Jan 21 18:18:14 crc kubenswrapper[4792]: I0121 18:18:14.690990 4792 generic.go:334] "Generic (PLEG): container finished" podID="e991533b-8af1-438b-99fb-31ed1b1db4d9" containerID="2026d59744b4919d088c7bae833bfbe10fc7cdcda5089f2e7bfa9534db4a6aca" exitCode=0
Jan 21 18:18:14 crc kubenswrapper[4792]: I0121 18:18:14.691030 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj"
event={"ID":"e991533b-8af1-438b-99fb-31ed1b1db4d9","Type":"ContainerDied","Data":"2026d59744b4919d088c7bae833bfbe10fc7cdcda5089f2e7bfa9534db4a6aca"} Jan 21 18:18:14 crc kubenswrapper[4792]: I0121 18:18:14.691679 4792 scope.go:117] "RemoveContainer" containerID="2026d59744b4919d088c7bae833bfbe10fc7cdcda5089f2e7bfa9534db4a6aca" Jan 21 18:18:14 crc kubenswrapper[4792]: E0121 18:18:14.693515 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj_service-telemetry(e991533b-8af1-438b-99fb-31ed1b1db4d9)\", failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/sg-core:latest\\\"\"]" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" podUID="e991533b-8af1-438b-99fb-31ed1b1db4d9" Jan 21 18:18:14 crc kubenswrapper[4792]: I0121 18:18:14.736085 4792 scope.go:117] "RemoveContainer" containerID="98b25a80124c21f167806e9946495d135b93f21ae45964ca9f5892058516ff02" Jan 21 18:18:14 crc kubenswrapper[4792]: I0121 18:18:14.778581 4792 scope.go:117] "RemoveContainer" containerID="6109e06c230e7a62c4362813822645a01ce87b2ee98c74d4b39419907cae633c" Jan 21 18:18:15 crc kubenswrapper[4792]: I0121 18:18:15.703563 4792 scope.go:117] "RemoveContainer" containerID="8517452fa84d253eaedb00684e62e7934c9ced6b872fd1f4324222194b265d3e" Jan 21 18:18:15 crc kubenswrapper[4792]: E0121 18:18:15.704143 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-ceil-event-smartgateway-f99f584f7-bt694_service-telemetry(1007e823-8fe4-4892-9891-3a9ac8df0d23)\"" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694" podUID="1007e823-8fe4-4892-9891-3a9ac8df0d23" Jan 21 18:18:15 crc kubenswrapper[4792]: I0121 18:18:15.707986 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" event={"ID":"3a86979f-5891-42c1-89ce-6b0aefd1af14","Type":"ContainerStarted","Data":"339bc42cc733811947a28f1c766c228ad2c0c75d258c9dadc90428c45921e78d"} Jan 21 18:18:15 crc kubenswrapper[4792]: I0121 18:18:15.749143 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs" podStartSLOduration=3.934730055 podStartE2EDuration="47.749124056s" podCreationTimestamp="2026-01-21 18:17:28 +0000 UTC" firstStartedPulling="2026-01-21 18:17:30.769887536 +0000 UTC m=+1284.751850722" lastFinishedPulling="2026-01-21 18:18:14.584281537 +0000 UTC m=+1328.566244723" observedRunningTime="2026-01-21 18:18:15.74383255 +0000 UTC m=+1329.725795736" watchObservedRunningTime="2026-01-21 18:18:15.749124056 +0000 UTC m=+1329.731087252" Jan 21 18:18:24 crc kubenswrapper[4792]: I0121 18:18:24.246995 4792 scope.go:117] "RemoveContainer" containerID="a72b21d991cf7daf9d3041f1637ef1f453de93131f5d92be5994072b29dfcedd" Jan 21 18:18:25 crc kubenswrapper[4792]: I0121 18:18:25.782772 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn" event={"ID":"c9b8bebe-7f04-4c45-9455-60313f44f51e","Type":"ContainerStarted","Data":"c136801980ff36fde341ed3561a0b095bca60512152d821f1e7023be9778063a"} Jan 21 18:18:25 crc kubenswrapper[4792]: 
I0121 18:18:25.783133 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn" event={"ID":"c9b8bebe-7f04-4c45-9455-60313f44f51e","Type":"ContainerStarted","Data":"298624cc1ee7232b1ea32ed57f4ac73c531053021b201fb7ab0825b120395984"} Jan 21 18:18:25 crc kubenswrapper[4792]: I0121 18:18:25.808419 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn" podStartSLOduration=2.0842630619999998 podStartE2EDuration="40.808397023s" podCreationTimestamp="2026-01-21 18:17:45 +0000 UTC" firstStartedPulling="2026-01-21 18:17:46.756483392 +0000 UTC m=+1300.738446578" lastFinishedPulling="2026-01-21 18:18:25.480617353 +0000 UTC m=+1339.462580539" observedRunningTime="2026-01-21 18:18:25.804545877 +0000 UTC m=+1339.786509063" watchObservedRunningTime="2026-01-21 18:18:25.808397023 +0000 UTC m=+1339.790360209" Jan 21 18:18:26 crc kubenswrapper[4792]: I0121 18:18:26.250986 4792 scope.go:117] "RemoveContainer" containerID="8517452fa84d253eaedb00684e62e7934c9ced6b872fd1f4324222194b265d3e" Jan 21 18:18:27 crc kubenswrapper[4792]: I0121 18:18:27.247572 4792 scope.go:117] "RemoveContainer" containerID="60c4edbe0deb92db9952211e6b8e987c03df1ef994761031be168594771af7c0" Jan 21 18:18:27 crc kubenswrapper[4792]: I0121 18:18:27.802999 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f99f584f7-bt694" event={"ID":"1007e823-8fe4-4892-9891-3a9ac8df0d23","Type":"ContainerStarted","Data":"6f33838fbb141d73951775e279dab2bd2bbe39e6d3723aac667b3e4f0f8e20ed"} Jan 21 18:18:28 crc kubenswrapper[4792]: I0121 18:18:28.246962 4792 scope.go:117] "RemoveContainer" containerID="2026d59744b4919d088c7bae833bfbe10fc7cdcda5089f2e7bfa9534db4a6aca" Jan 21 18:18:28 crc kubenswrapper[4792]: I0121 18:18:28.825609 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" event={"ID":"5ed633e5-26cf-469c-b1b2-250229e6d602","Type":"ContainerStarted","Data":"ec7f2463e63cf9fc8a2ac6f6cbaac6d8801f157c5770c8a3a307a994e68d7c22"} Jan 21 18:18:28 crc kubenswrapper[4792]: I0121 18:18:28.825680 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" event={"ID":"5ed633e5-26cf-469c-b1b2-250229e6d602","Type":"ContainerStarted","Data":"d9be7b4ae097f03de5b7bb04821b918e8890c9d8ef7d1bac182abe5aa82be448"} Jan 21 18:18:28 crc kubenswrapper[4792]: I0121 18:18:28.855131 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8" podStartSLOduration=4.326357035 podStartE2EDuration="1m3.85510815s" podCreationTimestamp="2026-01-21 18:17:25 +0000 UTC" firstStartedPulling="2026-01-21 18:17:29.027781409 +0000 UTC m=+1283.009744585" lastFinishedPulling="2026-01-21 18:18:28.556532524 +0000 UTC m=+1342.538495700" observedRunningTime="2026-01-21 18:18:28.847641376 +0000 UTC m=+1342.829604572" watchObservedRunningTime="2026-01-21 18:18:28.85510815 +0000 UTC m=+1342.837071336" Jan 21 18:18:29 crc kubenswrapper[4792]: I0121 18:18:29.836653 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" 
event={"ID":"e991533b-8af1-438b-99fb-31ed1b1db4d9","Type":"ContainerStarted","Data":"5fb4f30ff3ae2f7056773c3460937357fe9daa0a2d589a35ac4f47f13fe40e47"} Jan 21 18:18:29 crc kubenswrapper[4792]: I0121 18:18:29.838284 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" event={"ID":"e991533b-8af1-438b-99fb-31ed1b1db4d9","Type":"ContainerStarted","Data":"1f9d4c861bebff3952c38d276583e6e67421ceba237ee4a046bc8ae42139bc9f"} Jan 21 18:18:29 crc kubenswrapper[4792]: I0121 18:18:29.860345 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj" podStartSLOduration=7.36534673 podStartE2EDuration="54.860321382s" podCreationTimestamp="2026-01-21 18:17:35 +0000 UTC" firstStartedPulling="2026-01-21 18:17:41.809998524 +0000 UTC m=+1295.791961710" lastFinishedPulling="2026-01-21 18:18:29.304973176 +0000 UTC m=+1343.286936362" observedRunningTime="2026-01-21 18:18:29.855961572 +0000 UTC m=+1343.837924778" watchObservedRunningTime="2026-01-21 18:18:29.860321382 +0000 UTC m=+1343.842284568" Jan 21 18:18:39 crc kubenswrapper[4792]: I0121 18:18:39.491319 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/qdr-test"] Jan 21 18:18:39 crc kubenswrapper[4792]: I0121 18:18:39.494105 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/qdr-test" Jan 21 18:18:39 crc kubenswrapper[4792]: I0121 18:18:39.500544 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"qdr-test-config" Jan 21 18:18:39 crc kubenswrapper[4792]: I0121 18:18:39.501023 4792 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-selfsigned" Jan 21 18:18:39 crc kubenswrapper[4792]: I0121 18:18:39.528359 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/qdr-test"] Jan 21 18:18:39 crc kubenswrapper[4792]: I0121 18:18:39.531797 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/17d54200-f40a-4588-9438-2e2278f193ae-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: \"17d54200-f40a-4588-9438-2e2278f193ae\") " pod="service-telemetry/qdr-test" Jan 21 18:18:39 crc kubenswrapper[4792]: I0121 18:18:39.531934 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbshr\" (UniqueName: \"kubernetes.io/projected/17d54200-f40a-4588-9438-2e2278f193ae-kube-api-access-tbshr\") pod \"qdr-test\" (UID: \"17d54200-f40a-4588-9438-2e2278f193ae\") " pod="service-telemetry/qdr-test" Jan 21 18:18:39 crc kubenswrapper[4792]: I0121 18:18:39.532124 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/17d54200-f40a-4588-9438-2e2278f193ae-qdr-test-config\") pod \"qdr-test\" (UID: \"17d54200-f40a-4588-9438-2e2278f193ae\") " pod="service-telemetry/qdr-test" Jan 21 18:18:39 crc kubenswrapper[4792]: I0121 18:18:39.634018 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/17d54200-f40a-4588-9438-2e2278f193ae-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: \"17d54200-f40a-4588-9438-2e2278f193ae\") " 
pod="service-telemetry/qdr-test" Jan 21 18:18:39 crc kubenswrapper[4792]: I0121 18:18:39.634107 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbshr\" (UniqueName: \"kubernetes.io/projected/17d54200-f40a-4588-9438-2e2278f193ae-kube-api-access-tbshr\") pod \"qdr-test\" (UID: \"17d54200-f40a-4588-9438-2e2278f193ae\") " pod="service-telemetry/qdr-test" Jan 21 18:18:39 crc kubenswrapper[4792]: I0121 18:18:39.634217 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/17d54200-f40a-4588-9438-2e2278f193ae-qdr-test-config\") pod \"qdr-test\" (UID: \"17d54200-f40a-4588-9438-2e2278f193ae\") " pod="service-telemetry/qdr-test" Jan 21 18:18:39 crc kubenswrapper[4792]: I0121 18:18:39.635202 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/17d54200-f40a-4588-9438-2e2278f193ae-qdr-test-config\") pod \"qdr-test\" (UID: \"17d54200-f40a-4588-9438-2e2278f193ae\") " pod="service-telemetry/qdr-test" Jan 21 18:18:39 crc kubenswrapper[4792]: I0121 18:18:39.642193 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/17d54200-f40a-4588-9438-2e2278f193ae-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: \"17d54200-f40a-4588-9438-2e2278f193ae\") " pod="service-telemetry/qdr-test" Jan 21 18:18:39 crc kubenswrapper[4792]: I0121 18:18:39.656057 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbshr\" (UniqueName: \"kubernetes.io/projected/17d54200-f40a-4588-9438-2e2278f193ae-kube-api-access-tbshr\") pod \"qdr-test\" (UID: \"17d54200-f40a-4588-9438-2e2278f193ae\") " pod="service-telemetry/qdr-test" Jan 21 18:18:39 crc kubenswrapper[4792]: I0121 18:18:39.820540 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/qdr-test" Jan 21 18:18:40 crc kubenswrapper[4792]: I0121 18:18:40.043620 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/qdr-test"] Jan 21 18:18:40 crc kubenswrapper[4792]: I0121 18:18:40.928305 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/qdr-test" event={"ID":"17d54200-f40a-4588-9438-2e2278f193ae","Type":"ContainerStarted","Data":"76c903f90577cba24404787711ad87938a04bb8cf16318f78e4fd5642e5ecec9"} Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.006804 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/qdr-test" event={"ID":"17d54200-f40a-4588-9438-2e2278f193ae","Type":"ContainerStarted","Data":"1672b222543b5e418320b6a64435b2c88d69abefcbbff594a5f3437b0074ceeb"} Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.033664 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/qdr-test" podStartSLOduration=1.682332814 podStartE2EDuration="9.033606155s" podCreationTimestamp="2026-01-21 18:18:39 +0000 UTC" firstStartedPulling="2026-01-21 18:18:40.059882165 +0000 UTC m=+1354.041845351" lastFinishedPulling="2026-01-21 18:18:47.411155496 +0000 UTC m=+1361.393118692" observedRunningTime="2026-01-21 18:18:48.027557039 +0000 UTC m=+1362.009520235" watchObservedRunningTime="2026-01-21 18:18:48.033606155 +0000 UTC m=+1362.015569341" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.350161 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/stf-smoketest-smoke1-59fq9"] Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.351386 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-59fq9" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.354481 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-config" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.354503 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-entrypoint-script" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.355001 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-publisher" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.355136 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-sensubility-config" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.355191 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-entrypoint-script" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.355147 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-healthcheck-log" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.370038 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-59fq9"] Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.461931 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-healthcheck-log\") pod \"stf-smoketest-smoke1-59fq9\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") " pod="service-telemetry/stf-smoketest-smoke1-59fq9" Jan 21 18:18:48 crc kubenswrapper[4792]: 
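The volume traffic above (VerifyControllerAttachedVolume → MountVolume → MountVolume.SetUp) is the reconciler wiring ConfigMaps into the smoketest pod. The container spec logged at 18:19:18 shows these mounted as single files via SubPath. A minimal sketch of that shape using the k8s.io/api/core/v1 types — not the operator's actual manifest, just an illustration of the pattern:

// volumes.go — mounting one key of a ConfigMap as a single file via SubPath,
// matching the collectd-sensubility.conf mount in the logged container spec.
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	vol := corev1.Volume{
		Name: "sensubility-config",
		VolumeSource: corev1.VolumeSource{
			ConfigMap: &corev1.ConfigMapVolumeSource{
				LocalObjectReference: corev1.LocalObjectReference{
					Name: "stf-smoketest-sensubility-config",
				},
			},
		},
	}
	mount := corev1.VolumeMount{
		Name:      "sensubility-config",
		MountPath: "/etc/collectd-sensubility.conf", // a single file, not a directory,
		SubPath:   "collectd-sensubility.conf",      // because SubPath selects one key
	}
	fmt.Println(vol.Name, "->", mount.MountPath)
}

Without SubPath the ConfigMap would shadow the whole /etc directory; with it, only the one file is projected, which is exactly what the healthcheck-log, collectd-config, and entrypoint-script mounts above rely on.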
I0121 18:18:48.462005 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-ceilometer-publisher\") pod \"stf-smoketest-smoke1-59fq9\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") " pod="service-telemetry/stf-smoketest-smoke1-59fq9" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.462073 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-collectd-config\") pod \"stf-smoketest-smoke1-59fq9\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") " pod="service-telemetry/stf-smoketest-smoke1-59fq9" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.462127 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmgkj\" (UniqueName: \"kubernetes.io/projected/90065060-6909-4d1d-9dbd-a0562b9e6a05-kube-api-access-tmgkj\") pod \"stf-smoketest-smoke1-59fq9\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") " pod="service-telemetry/stf-smoketest-smoke1-59fq9" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.462185 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-59fq9\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") " pod="service-telemetry/stf-smoketest-smoke1-59fq9" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.462217 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-59fq9\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") " pod="service-telemetry/stf-smoketest-smoke1-59fq9" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.462254 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-sensubility-config\") pod \"stf-smoketest-smoke1-59fq9\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") " pod="service-telemetry/stf-smoketest-smoke1-59fq9" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.563797 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-collectd-config\") pod \"stf-smoketest-smoke1-59fq9\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") " pod="service-telemetry/stf-smoketest-smoke1-59fq9" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.564180 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmgkj\" (UniqueName: \"kubernetes.io/projected/90065060-6909-4d1d-9dbd-a0562b9e6a05-kube-api-access-tmgkj\") pod \"stf-smoketest-smoke1-59fq9\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") " pod="service-telemetry/stf-smoketest-smoke1-59fq9" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.564243 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: 
\"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-59fq9\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") " pod="service-telemetry/stf-smoketest-smoke1-59fq9" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.564295 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-59fq9\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") " pod="service-telemetry/stf-smoketest-smoke1-59fq9" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.564338 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-sensubility-config\") pod \"stf-smoketest-smoke1-59fq9\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") " pod="service-telemetry/stf-smoketest-smoke1-59fq9" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.564400 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-healthcheck-log\") pod \"stf-smoketest-smoke1-59fq9\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") " pod="service-telemetry/stf-smoketest-smoke1-59fq9" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.564421 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-ceilometer-publisher\") pod \"stf-smoketest-smoke1-59fq9\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") " pod="service-telemetry/stf-smoketest-smoke1-59fq9" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.565041 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-collectd-config\") pod \"stf-smoketest-smoke1-59fq9\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") " pod="service-telemetry/stf-smoketest-smoke1-59fq9" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.565293 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-ceilometer-publisher\") pod \"stf-smoketest-smoke1-59fq9\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") " pod="service-telemetry/stf-smoketest-smoke1-59fq9" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.565900 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-59fq9\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") " pod="service-telemetry/stf-smoketest-smoke1-59fq9" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.565984 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-sensubility-config\") pod \"stf-smoketest-smoke1-59fq9\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") " pod="service-telemetry/stf-smoketest-smoke1-59fq9" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.566647 4792 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-healthcheck-log\") pod \"stf-smoketest-smoke1-59fq9\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") " pod="service-telemetry/stf-smoketest-smoke1-59fq9" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.567171 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-59fq9\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") " pod="service-telemetry/stf-smoketest-smoke1-59fq9" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.604135 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmgkj\" (UniqueName: \"kubernetes.io/projected/90065060-6909-4d1d-9dbd-a0562b9e6a05-kube-api-access-tmgkj\") pod \"stf-smoketest-smoke1-59fq9\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") " pod="service-telemetry/stf-smoketest-smoke1-59fq9" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.756546 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/curl"] Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.757525 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/curl" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.763690 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-59fq9" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.766579 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfq4p\" (UniqueName: \"kubernetes.io/projected/2559813b-fa2d-493b-92fe-157638dbcba1-kube-api-access-qfq4p\") pod \"curl\" (UID: \"2559813b-fa2d-493b-92fe-157638dbcba1\") " pod="service-telemetry/curl" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.771625 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/curl"] Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.868361 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfq4p\" (UniqueName: \"kubernetes.io/projected/2559813b-fa2d-493b-92fe-157638dbcba1-kube-api-access-qfq4p\") pod \"curl\" (UID: \"2559813b-fa2d-493b-92fe-157638dbcba1\") " pod="service-telemetry/curl" Jan 21 18:18:48 crc kubenswrapper[4792]: I0121 18:18:48.894124 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfq4p\" (UniqueName: \"kubernetes.io/projected/2559813b-fa2d-493b-92fe-157638dbcba1-kube-api-access-qfq4p\") pod \"curl\" (UID: \"2559813b-fa2d-493b-92fe-157638dbcba1\") " pod="service-telemetry/curl" Jan 21 18:18:49 crc kubenswrapper[4792]: I0121 18:18:49.030649 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-59fq9"] Jan 21 18:18:49 crc kubenswrapper[4792]: W0121 18:18:49.036307 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod90065060_6909_4d1d_9dbd_a0562b9e6a05.slice/crio-7141d75300a6463767a89dde033d55634889e37c0ad499d640f5b8b41d26ba1a WatchSource:0}: Error finding container 7141d75300a6463767a89dde033d55634889e37c0ad499d640f5b8b41d26ba1a: Status 404 returned error can't find the container with id 7141d75300a6463767a89dde033d55634889e37c0ad499d640f5b8b41d26ba1a Jan 21 
18:18:49 crc kubenswrapper[4792]: I0121 18:18:49.149596 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/curl" Jan 21 18:18:49 crc kubenswrapper[4792]: I0121 18:18:49.380892 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/curl"] Jan 21 18:18:50 crc kubenswrapper[4792]: I0121 18:18:50.035281 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"2559813b-fa2d-493b-92fe-157638dbcba1","Type":"ContainerStarted","Data":"b69697652b8a2f9a199e1d6c021e0b12d89b0a46db5305e8a0fcea3aee282803"} Jan 21 18:18:50 crc kubenswrapper[4792]: I0121 18:18:50.038445 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-59fq9" event={"ID":"90065060-6909-4d1d-9dbd-a0562b9e6a05","Type":"ContainerStarted","Data":"7141d75300a6463767a89dde033d55634889e37c0ad499d640f5b8b41d26ba1a"} Jan 21 18:18:53 crc kubenswrapper[4792]: I0121 18:18:53.571715 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:18:53 crc kubenswrapper[4792]: I0121 18:18:53.572515 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:19:11 crc kubenswrapper[4792]: I0121 18:19:11.196437 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-framework-operators-dmq8z"] Jan 21 18:19:11 crc kubenswrapper[4792]: I0121 18:19:11.198127 4792 util.go:30] "No sandbox for pod can be found. 
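The recurring machine-config-daemon probe failures above ("dial tcp 127.0.0.1:8798: connect: connection refused") are ordinary HTTP liveness checks. A minimal stand-in for what the prober does, assuming kubelet's default 1s probe timeout (the URL comes straight from the log; success is any 2xx/3xx status):

// probe.go — illustrative HTTP liveness check; a transport error such as
// "connection refused" or a non-2xx/3xx status counts as a probe failure.
package main

import (
	"fmt"
	"net/http"
	"time"
)

func probe(url string) error {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. "dial tcp 127.0.0.1:8798: connect: connection refused"
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unhealthy: status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	fmt.Println(probe("http://127.0.0.1:8798/health"))
}

After enough consecutive failures (failureThreshold) the kubelet kills the container for restart, which is what happens to machine-config-daemon later in this log.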
Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-dmq8z" Jan 21 18:19:11 crc kubenswrapper[4792]: I0121 18:19:11.230507 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-dmq8z"] Jan 21 18:19:11 crc kubenswrapper[4792]: I0121 18:19:11.318745 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkk47\" (UniqueName: \"kubernetes.io/projected/4bddbc18-b172-4795-a2c4-b771c7f5dea1-kube-api-access-pkk47\") pod \"service-telemetry-framework-operators-dmq8z\" (UID: \"4bddbc18-b172-4795-a2c4-b771c7f5dea1\") " pod="service-telemetry/service-telemetry-framework-operators-dmq8z" Jan 21 18:19:11 crc kubenswrapper[4792]: I0121 18:19:11.420484 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkk47\" (UniqueName: \"kubernetes.io/projected/4bddbc18-b172-4795-a2c4-b771c7f5dea1-kube-api-access-pkk47\") pod \"service-telemetry-framework-operators-dmq8z\" (UID: \"4bddbc18-b172-4795-a2c4-b771c7f5dea1\") " pod="service-telemetry/service-telemetry-framework-operators-dmq8z" Jan 21 18:19:11 crc kubenswrapper[4792]: I0121 18:19:11.441037 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkk47\" (UniqueName: \"kubernetes.io/projected/4bddbc18-b172-4795-a2c4-b771c7f5dea1-kube-api-access-pkk47\") pod \"service-telemetry-framework-operators-dmq8z\" (UID: \"4bddbc18-b172-4795-a2c4-b771c7f5dea1\") " pod="service-telemetry/service-telemetry-framework-operators-dmq8z" Jan 21 18:19:11 crc kubenswrapper[4792]: I0121 18:19:11.530598 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-dmq8z" Jan 21 18:19:18 crc kubenswrapper[4792]: E0121 18:19:18.932005 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/tripleomastercentos9/openstack-collectd:current-tripleo" Jan 21 18:19:18 crc kubenswrapper[4792]: E0121 18:19:18.932700 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:smoketest-collectd,Image:quay.io/tripleomastercentos9/openstack-collectd:current-tripleo,Command:[/smoketest_collectd_entrypoint.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CLOUDNAME,Value:smoke1,ValueFrom:nil,},EnvVar{Name:ELASTICSEARCH_AUTH_PASS,Value:BhbJcTT3p54OzOFQu64lZHKd,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_AUTH_TOKEN,Value:eyJhbGciOiJSUzI1NiIsImtpZCI6InF6SnFxNFFjbVk5VmJQZ2dNMmUxdHFmTlJlVWx4UDhSTlhIamV3RUx4WU0ifQ.eyJhdWQiOlsiaHR0cHM6Ly9rdWJlcm5ldGVzLmRlZmF1bHQuc3ZjIl0sImV4cCI6MTc2OTAyMzExNCwiaWF0IjoxNzY5MDE5NTE0LCJpc3MiOiJodHRwczovL2t1YmVybmV0ZXMuZGVmYXVsdC5zdmMiLCJqdGkiOiJlM2NmNTZlMy1kYjgyLTQ0OTAtOTVmYy0yMmNiMDFhMjkzNDAiLCJrdWJlcm5ldGVzLmlvIjp7Im5hbWVzcGFjZSI6InNlcnZpY2UtdGVsZW1ldHJ5Iiwic2VydmljZWFjY291bnQiOnsibmFtZSI6InN0Zi1wcm9tZXRoZXVzLXJlYWRlciIsInVpZCI6ImY4OGE5ODRlLWQ5MTktNDhmNC1hODRmLTI3MTJmM2U5OWNjYiJ9fSwibmJmIjoxNzY5MDE5NTE0LCJzdWIiOiJzeXN0ZW06c2VydmljZWFjY291bnQ6c2VydmljZS10ZWxlbWV0cnk6c3RmLXByb21ldGhldXMtcmVhZGVyIn0.AFh16ZIZgVAbbXK5_g1wVn4kx4k41j1P6cT4CFYjJNUMFzq0EXZVuWCujySjr0PwGSeeMzAExIFOccVAqxtkHzAvWGdU2r2_Wcf3F6KhBycJX29KrSeGM4YaltyQGZ2VKUe6rMfAr4HJf_PXWk7AU2GxcFtLv9Umw1JayEeyQfmCQCqV_vHbMgI6gEjbCTgjOs9P7NO6D7wJSPXDszSBQfUPoxMjWMgHr-9agR4y2TrAHotrAoAesnnHhXE_9YN8uUWcOxkm0r46ok40e7YPnnxsR72_KwlZgMN07rsCTSuraKxdMuMrcTutjG04AH3m5yR71WGF982zCl_TfrHH7kjRe43Md3XqXBoVfzqqsVjRqcbIvQCL5UakHFfy-CYMItThdo71MnPCQAyL8yh1xfmbtjVSjH_ISTIRqbVYZzlVgD-AknsVqr56JzON1mZ5z9Igd4qU2AcBL7kLoBptFi2WYwGw0Luii0kKKUlBdsn4qfc4E5CkK6-6vxfZ9K4kIamy4UE20nOTINyZObxBeR1-gztvsUQBXmFC18_DZSADKca3rRPaZrselZecnKrIzsgFKxCVD3EHZ4QY34xSCbsBEHyN6_fIAZDr0PxjVsrv1Pca9FFr0vW-0-Ws8c3JPAZpo-n3TvL0d0Hwy93uyqLSnTYz1gPjFc3V-VgiLtw,ValueFrom:nil,},EnvVar{Name:OBSERVABILITY_STRATEGY,Value:<>,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:collectd-config,ReadOnly:false,MountPath:/etc/minimal-collectd.conf.template,SubPath:minimal-collectd.conf.template,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:sensubility-config,ReadOnly:false,MountPath:/etc/collectd-sensubility.conf,SubPath:collectd-sensubility.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:healthcheck-log,ReadOnly:false,MountPath:/healthcheck.log,SubPath:healthcheck.log,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:collectd-entrypoint-script,ReadOnly:false,MountPath:/smoketest_collectd_entrypoint.sh,SubPath:smoketest_collectd_entrypoint.sh,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tmgkj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod stf-smoketest-smoke1-59fq9_service-telemetry(90065060-6909-4d1d-9dbd-a0562b9e6a05): ErrImagePull: rpc 
error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 21 18:19:19 crc kubenswrapper[4792]: I0121 18:19:19.277502 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"2559813b-fa2d-493b-92fe-157638dbcba1","Type":"ContainerStarted","Data":"e59da218a2326a9ce347d149588d0dea62c349f9e343e457a5f36f1dfaa7434a"} Jan 21 18:19:19 crc kubenswrapper[4792]: I0121 18:19:19.298031 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/curl" podStartSLOduration=1.787779661 podStartE2EDuration="31.298010782s" podCreationTimestamp="2026-01-21 18:18:48 +0000 UTC" firstStartedPulling="2026-01-21 18:18:49.388137968 +0000 UTC m=+1363.370101154" lastFinishedPulling="2026-01-21 18:19:18.898369089 +0000 UTC m=+1392.880332275" observedRunningTime="2026-01-21 18:19:19.294703244 +0000 UTC m=+1393.276666450" watchObservedRunningTime="2026-01-21 18:19:19.298010782 +0000 UTC m=+1393.279973968" Jan 21 18:19:19 crc kubenswrapper[4792]: I0121 18:19:19.357701 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-dmq8z"] Jan 21 18:19:19 crc kubenswrapper[4792]: I0121 18:19:19.408200 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-78bcbbdcff-sfzvf_21374324-faf0-4c74-864a-8bf6b2819027/prometheus-webhook-snmp/0.log" Jan 21 18:19:19 crc kubenswrapper[4792]: W0121 18:19:19.422373 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4bddbc18_b172_4795_a2c4_b771c7f5dea1.slice/crio-fa1758bd28383f9bb97fbda751dc221e1d66c80dae6a35cd5531627611be68ad WatchSource:0}: Error finding container fa1758bd28383f9bb97fbda751dc221e1d66c80dae6a35cd5531627611be68ad: Status 404 returned error can't find the container with id fa1758bd28383f9bb97fbda751dc221e1d66c80dae6a35cd5531627611be68ad Jan 21 18:19:20 crc kubenswrapper[4792]: I0121 18:19:20.291729 4792 generic.go:334] "Generic (PLEG): container finished" podID="2559813b-fa2d-493b-92fe-157638dbcba1" containerID="e59da218a2326a9ce347d149588d0dea62c349f9e343e457a5f36f1dfaa7434a" exitCode=0 Jan 21 18:19:20 crc kubenswrapper[4792]: I0121 18:19:20.291780 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"2559813b-fa2d-493b-92fe-157638dbcba1","Type":"ContainerDied","Data":"e59da218a2326a9ce347d149588d0dea62c349f9e343e457a5f36f1dfaa7434a"} Jan 21 18:19:20 crc kubenswrapper[4792]: I0121 18:19:20.294935 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-dmq8z" event={"ID":"4bddbc18-b172-4795-a2c4-b771c7f5dea1","Type":"ContainerStarted","Data":"a1fab037bb54e22152bb60b850daaa11f537039038329af3a29a3a51c4199dfa"} Jan 21 18:19:20 crc kubenswrapper[4792]: I0121 18:19:20.294991 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-dmq8z" event={"ID":"4bddbc18-b172-4795-a2c4-b771c7f5dea1","Type":"ContainerStarted","Data":"fa1758bd28383f9bb97fbda751dc221e1d66c80dae6a35cd5531627611be68ad"} Jan 21 18:19:20 crc kubenswrapper[4792]: I0121 18:19:20.332960 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-framework-operators-dmq8z" podStartSLOduration=9.133365846 podStartE2EDuration="9.332932446s" podCreationTimestamp="2026-01-21 18:19:11 +0000 UTC" 
firstStartedPulling="2026-01-21 18:19:19.426158261 +0000 UTC m=+1393.408121447" lastFinishedPulling="2026-01-21 18:19:19.625724861 +0000 UTC m=+1393.607688047" observedRunningTime="2026-01-21 18:19:20.327883742 +0000 UTC m=+1394.309846928" watchObservedRunningTime="2026-01-21 18:19:20.332932446 +0000 UTC m=+1394.314895632" Jan 21 18:19:21 crc kubenswrapper[4792]: E0121 18:19:21.464814 4792 kubelet_node_status.go:756] "Failed to set some node status fields" err="failed to validate nodeIP: route ip+net: no such network interface" node="crc" Jan 21 18:19:21 crc kubenswrapper[4792]: I0121 18:19:21.531393 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/service-telemetry-framework-operators-dmq8z" Jan 21 18:19:21 crc kubenswrapper[4792]: I0121 18:19:21.531444 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/service-telemetry-framework-operators-dmq8z" Jan 21 18:19:21 crc kubenswrapper[4792]: I0121 18:19:21.566734 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/service-telemetry-framework-operators-dmq8z" Jan 21 18:19:21 crc kubenswrapper[4792]: I0121 18:19:21.627206 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/curl" Jan 21 18:19:21 crc kubenswrapper[4792]: I0121 18:19:21.809307 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfq4p\" (UniqueName: \"kubernetes.io/projected/2559813b-fa2d-493b-92fe-157638dbcba1-kube-api-access-qfq4p\") pod \"2559813b-fa2d-493b-92fe-157638dbcba1\" (UID: \"2559813b-fa2d-493b-92fe-157638dbcba1\") " Jan 21 18:19:21 crc kubenswrapper[4792]: I0121 18:19:21.815106 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2559813b-fa2d-493b-92fe-157638dbcba1-kube-api-access-qfq4p" (OuterVolumeSpecName: "kube-api-access-qfq4p") pod "2559813b-fa2d-493b-92fe-157638dbcba1" (UID: "2559813b-fa2d-493b-92fe-157638dbcba1"). InnerVolumeSpecName "kube-api-access-qfq4p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:19:21 crc kubenswrapper[4792]: I0121 18:19:21.911384 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfq4p\" (UniqueName: \"kubernetes.io/projected/2559813b-fa2d-493b-92fe-157638dbcba1-kube-api-access-qfq4p\") on node \"crc\" DevicePath \"\"" Jan 21 18:19:22 crc kubenswrapper[4792]: I0121 18:19:22.320916 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"2559813b-fa2d-493b-92fe-157638dbcba1","Type":"ContainerDied","Data":"b69697652b8a2f9a199e1d6c021e0b12d89b0a46db5305e8a0fcea3aee282803"} Jan 21 18:19:22 crc kubenswrapper[4792]: I0121 18:19:22.320938 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/curl" Jan 21 18:19:22 crc kubenswrapper[4792]: I0121 18:19:22.321073 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b69697652b8a2f9a199e1d6c021e0b12d89b0a46db5305e8a0fcea3aee282803" Jan 21 18:19:23 crc kubenswrapper[4792]: I0121 18:19:23.571405 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:19:23 crc kubenswrapper[4792]: I0121 18:19:23.572381 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:19:31 crc kubenswrapper[4792]: I0121 18:19:31.562723 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/service-telemetry-framework-operators-dmq8z" Jan 21 18:19:31 crc kubenswrapper[4792]: I0121 18:19:31.611119 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-dmq8z"] Jan 21 18:19:32 crc kubenswrapper[4792]: E0121 18:19:32.164889 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"smoketest-collectd\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/stf-smoketest-smoke1-59fq9" podUID="90065060-6909-4d1d-9dbd-a0562b9e6a05" Jan 21 18:19:32 crc kubenswrapper[4792]: I0121 18:19:32.405581 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/service-telemetry-framework-operators-dmq8z" podUID="4bddbc18-b172-4795-a2c4-b771c7f5dea1" containerName="registry-server" containerID="cri-o://a1fab037bb54e22152bb60b850daaa11f537039038329af3a29a3a51c4199dfa" gracePeriod=2 Jan 21 18:19:32 crc kubenswrapper[4792]: I0121 18:19:32.407005 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-59fq9" event={"ID":"90065060-6909-4d1d-9dbd-a0562b9e6a05","Type":"ContainerStarted","Data":"2d53c4b0f6efa3734fbd3bd421d716491d88e3bde80eea693ec468f2cb302725"} Jan 21 18:19:33 crc kubenswrapper[4792]: I0121 18:19:33.414543 4792 generic.go:334] "Generic (PLEG): container finished" podID="4bddbc18-b172-4795-a2c4-b771c7f5dea1" containerID="a1fab037bb54e22152bb60b850daaa11f537039038329af3a29a3a51c4199dfa" exitCode=0 Jan 21 18:19:33 crc kubenswrapper[4792]: I0121 18:19:33.414696 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-dmq8z" event={"ID":"4bddbc18-b172-4795-a2c4-b771c7f5dea1","Type":"ContainerDied","Data":"a1fab037bb54e22152bb60b850daaa11f537039038329af3a29a3a51c4199dfa"} Jan 21 18:19:33 crc kubenswrapper[4792]: I0121 18:19:33.419278 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-59fq9" event={"ID":"90065060-6909-4d1d-9dbd-a0562b9e6a05","Type":"ContainerStarted","Data":"2a23f9c63f2ca23cabb7d11d8a075acdf01cc3e59395a7df462d5595ad2c9873"} Jan 21 18:19:33 crc kubenswrapper[4792]: I0121 18:19:33.436807 4792 util.go:48] "No ready sandbox for pod can be found. 
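"Killing container with a grace period ... gracePeriod=2" above is the SIGTERM/SIGKILL contract: the runtime delivers SIGTERM, waits out the grace period, then SIGKILLs whatever remains. A minimal sketch of the cooperative side, i.e. what a well-behaved container process does inside that window:

// shutdown.go — illustrative graceful-shutdown handler; the grace-period
// deadline is enforced by the runtime, not by this process.
package main

import (
	"fmt"
	"os"
	"os/signal"
	"syscall"
	"time"
)

func main() {
	term := make(chan os.Signal, 1)
	signal.Notify(term, syscall.SIGTERM)

	<-term // runtime delivered SIGTERM; the grace-period clock is now running
	fmt.Println("draining in-flight work...")

	// Finish well inside the grace period; anything still running when it
	// expires is killed with SIGKILL and gets no chance to clean up.
	time.Sleep(500 * time.Millisecond)
	os.Exit(0)
}

The registry-server above gets only 2 seconds; the machine-config-daemon killed later in this log gets 600, reflecting how much state each is expected to flush.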
Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-dmq8z"
Jan 21 18:19:33 crc kubenswrapper[4792]: I0121 18:19:33.451165 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/stf-smoketest-smoke1-59fq9" podStartSLOduration=1.545030323 podStartE2EDuration="45.451140091s" podCreationTimestamp="2026-01-21 18:18:48 +0000 UTC" firstStartedPulling="2026-01-21 18:18:49.038664571 +0000 UTC m=+1363.020627757" lastFinishedPulling="2026-01-21 18:19:32.944774339 +0000 UTC m=+1406.926737525" observedRunningTime="2026-01-21 18:19:33.4447113 +0000 UTC m=+1407.426674496" watchObservedRunningTime="2026-01-21 18:19:33.451140091 +0000 UTC m=+1407.433103277"
Jan 21 18:19:33 crc kubenswrapper[4792]: I0121 18:19:33.620290 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pkk47\" (UniqueName: \"kubernetes.io/projected/4bddbc18-b172-4795-a2c4-b771c7f5dea1-kube-api-access-pkk47\") pod \"4bddbc18-b172-4795-a2c4-b771c7f5dea1\" (UID: \"4bddbc18-b172-4795-a2c4-b771c7f5dea1\") "
Jan 21 18:19:33 crc kubenswrapper[4792]: I0121 18:19:33.626677 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bddbc18-b172-4795-a2c4-b771c7f5dea1-kube-api-access-pkk47" (OuterVolumeSpecName: "kube-api-access-pkk47") pod "4bddbc18-b172-4795-a2c4-b771c7f5dea1" (UID: "4bddbc18-b172-4795-a2c4-b771c7f5dea1"). InnerVolumeSpecName "kube-api-access-pkk47". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 18:19:33 crc kubenswrapper[4792]: I0121 18:19:33.723177 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pkk47\" (UniqueName: \"kubernetes.io/projected/4bddbc18-b172-4795-a2c4-b771c7f5dea1-kube-api-access-pkk47\") on node \"crc\" DevicePath \"\""
Jan 21 18:19:34 crc kubenswrapper[4792]: I0121 18:19:34.430355 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-dmq8z"
Jan 21 18:19:34 crc kubenswrapper[4792]: I0121 18:19:34.430344 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-dmq8z" event={"ID":"4bddbc18-b172-4795-a2c4-b771c7f5dea1","Type":"ContainerDied","Data":"fa1758bd28383f9bb97fbda751dc221e1d66c80dae6a35cd5531627611be68ad"}
Jan 21 18:19:34 crc kubenswrapper[4792]: I0121 18:19:34.430432 4792 scope.go:117] "RemoveContainer" containerID="a1fab037bb54e22152bb60b850daaa11f537039038329af3a29a3a51c4199dfa"
Jan 21 18:19:34 crc kubenswrapper[4792]: I0121 18:19:34.461968 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-dmq8z"]
Jan 21 18:19:34 crc kubenswrapper[4792]: I0121 18:19:34.469314 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-dmq8z"]
Jan 21 18:19:36 crc kubenswrapper[4792]: I0121 18:19:36.257569 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bddbc18-b172-4795-a2c4-b771c7f5dea1" path="/var/lib/kubelet/pods/4bddbc18-b172-4795-a2c4-b771c7f5dea1/volumes"
Jan 21 18:19:49 crc kubenswrapper[4792]: I0121 18:19:49.566071 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-78bcbbdcff-sfzvf_21374324-faf0-4c74-864a-8bf6b2819027/prometheus-webhook-snmp/0.log"
Jan 21 18:19:53 crc kubenswrapper[4792]: I0121 18:19:53.570479 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 21 18:19:53 crc kubenswrapper[4792]: I0121 18:19:53.570815 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 21 18:19:53 crc kubenswrapper[4792]: I0121 18:19:53.570900 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x"
Jan 21 18:19:53 crc kubenswrapper[4792]: I0121 18:19:53.571610 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"73909500d6fe38233c9a225249850770aaf4af0c3c0a37a13ceb56094bc310c3"} pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 21 18:19:53 crc kubenswrapper[4792]: I0121 18:19:53.571655 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" containerID="cri-o://73909500d6fe38233c9a225249850770aaf4af0c3c0a37a13ceb56094bc310c3" gracePeriod=600
Jan 21 18:19:54 crc kubenswrapper[4792]: I0121 18:19:54.597560 4792 generic.go:334] "Generic (PLEG): container finished" podID="759f2e21-e44e-4049-b262-cb49448e22ab" containerID="73909500d6fe38233c9a225249850770aaf4af0c3c0a37a13ceb56094bc310c3" exitCode=0
Jan 21 18:19:54 crc kubenswrapper[4792]: I0121 18:19:54.597659 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerDied","Data":"73909500d6fe38233c9a225249850770aaf4af0c3c0a37a13ceb56094bc310c3"}
Jan 21 18:19:54 crc kubenswrapper[4792]: I0121 18:19:54.597891 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerStarted","Data":"0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586"}
Jan 21 18:19:54 crc kubenswrapper[4792]: I0121 18:19:54.597916 4792 scope.go:117] "RemoveContainer" containerID="81f61055a15b6c0de20c4bbce1d77d69823c921d5086b61e151446d2bb275b02"
Jan 21 18:20:04 crc kubenswrapper[4792]: I0121 18:20:04.709811 4792 generic.go:334] "Generic (PLEG): container finished" podID="90065060-6909-4d1d-9dbd-a0562b9e6a05" containerID="2d53c4b0f6efa3734fbd3bd421d716491d88e3bde80eea693ec468f2cb302725" exitCode=0
Jan 21 18:20:04 crc kubenswrapper[4792]: I0121 18:20:04.709908 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-59fq9" event={"ID":"90065060-6909-4d1d-9dbd-a0562b9e6a05","Type":"ContainerDied","Data":"2d53c4b0f6efa3734fbd3bd421d716491d88e3bde80eea693ec468f2cb302725"}
Jan 21 18:20:04 crc kubenswrapper[4792]: I0121 18:20:04.711317 4792 scope.go:117] "RemoveContainer" containerID="2d53c4b0f6efa3734fbd3bd421d716491d88e3bde80eea693ec468f2cb302725"
Jan 21 18:20:07 crc kubenswrapper[4792]: I0121 18:20:07.734896 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-59fq9" event={"ID":"90065060-6909-4d1d-9dbd-a0562b9e6a05","Type":"ContainerDied","Data":"2a23f9c63f2ca23cabb7d11d8a075acdf01cc3e59395a7df462d5595ad2c9873"}
Jan 21 18:20:07 crc kubenswrapper[4792]: I0121 18:20:07.734918 4792 generic.go:334] "Generic (PLEG): container finished" podID="90065060-6909-4d1d-9dbd-a0562b9e6a05" containerID="2a23f9c63f2ca23cabb7d11d8a075acdf01cc3e59395a7df462d5595ad2c9873" exitCode=1
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.056414 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-59fq9"
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.172476 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-ceilometer-entrypoint-script\") pod \"90065060-6909-4d1d-9dbd-a0562b9e6a05\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") "
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.172561 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-ceilometer-publisher\") pod \"90065060-6909-4d1d-9dbd-a0562b9e6a05\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") "
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.172609 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-sensubility-config\") pod \"90065060-6909-4d1d-9dbd-a0562b9e6a05\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") "
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.174020 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-collectd-config\") pod \"90065060-6909-4d1d-9dbd-a0562b9e6a05\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") "
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.174130 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-collectd-entrypoint-script\") pod \"90065060-6909-4d1d-9dbd-a0562b9e6a05\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") "
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.174836 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tmgkj\" (UniqueName: \"kubernetes.io/projected/90065060-6909-4d1d-9dbd-a0562b9e6a05-kube-api-access-tmgkj\") pod \"90065060-6909-4d1d-9dbd-a0562b9e6a05\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") "
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.175040 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-healthcheck-log\") pod \"90065060-6909-4d1d-9dbd-a0562b9e6a05\" (UID: \"90065060-6909-4d1d-9dbd-a0562b9e6a05\") "
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.186188 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90065060-6909-4d1d-9dbd-a0562b9e6a05-kube-api-access-tmgkj" (OuterVolumeSpecName: "kube-api-access-tmgkj") pod "90065060-6909-4d1d-9dbd-a0562b9e6a05" (UID: "90065060-6909-4d1d-9dbd-a0562b9e6a05"). InnerVolumeSpecName "kube-api-access-tmgkj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.192615 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-healthcheck-log" (OuterVolumeSpecName: "healthcheck-log") pod "90065060-6909-4d1d-9dbd-a0562b9e6a05" (UID: "90065060-6909-4d1d-9dbd-a0562b9e6a05"). InnerVolumeSpecName "healthcheck-log". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.192804 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-sensubility-config" (OuterVolumeSpecName: "sensubility-config") pod "90065060-6909-4d1d-9dbd-a0562b9e6a05" (UID: "90065060-6909-4d1d-9dbd-a0562b9e6a05"). InnerVolumeSpecName "sensubility-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.192881 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-ceilometer-entrypoint-script" (OuterVolumeSpecName: "ceilometer-entrypoint-script") pod "90065060-6909-4d1d-9dbd-a0562b9e6a05" (UID: "90065060-6909-4d1d-9dbd-a0562b9e6a05"). InnerVolumeSpecName "ceilometer-entrypoint-script". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.195001 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-collectd-config" (OuterVolumeSpecName: "collectd-config") pod "90065060-6909-4d1d-9dbd-a0562b9e6a05" (UID: "90065060-6909-4d1d-9dbd-a0562b9e6a05"). InnerVolumeSpecName "collectd-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.195392 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-collectd-entrypoint-script" (OuterVolumeSpecName: "collectd-entrypoint-script") pod "90065060-6909-4d1d-9dbd-a0562b9e6a05" (UID: "90065060-6909-4d1d-9dbd-a0562b9e6a05"). InnerVolumeSpecName "collectd-entrypoint-script". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.202778 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-ceilometer-publisher" (OuterVolumeSpecName: "ceilometer-publisher") pod "90065060-6909-4d1d-9dbd-a0562b9e6a05" (UID: "90065060-6909-4d1d-9dbd-a0562b9e6a05"). InnerVolumeSpecName "ceilometer-publisher". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.277082 4792 reconciler_common.go:293] "Volume detached for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-healthcheck-log\") on node \"crc\" DevicePath \"\""
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.278713 4792 reconciler_common.go:293] "Volume detached for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-ceilometer-entrypoint-script\") on node \"crc\" DevicePath \"\""
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.278817 4792 reconciler_common.go:293] "Volume detached for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-ceilometer-publisher\") on node \"crc\" DevicePath \"\""
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.278911 4792 reconciler_common.go:293] "Volume detached for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-sensubility-config\") on node \"crc\" DevicePath \"\""
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.278975 4792 reconciler_common.go:293] "Volume detached for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-collectd-config\") on node \"crc\" DevicePath \"\""
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.279028 4792 reconciler_common.go:293] "Volume detached for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/90065060-6909-4d1d-9dbd-a0562b9e6a05-collectd-entrypoint-script\") on node \"crc\" DevicePath \"\""
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.279080 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tmgkj\" (UniqueName: \"kubernetes.io/projected/90065060-6909-4d1d-9dbd-a0562b9e6a05-kube-api-access-tmgkj\") on node \"crc\" DevicePath \"\""
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.753413 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-59fq9" event={"ID":"90065060-6909-4d1d-9dbd-a0562b9e6a05","Type":"ContainerDied","Data":"7141d75300a6463767a89dde033d55634889e37c0ad499d640f5b8b41d26ba1a"}
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.753454 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7141d75300a6463767a89dde033d55634889e37c0ad499d640f5b8b41d26ba1a"
Jan 21 18:20:09 crc kubenswrapper[4792]: I0121 18:20:09.753462 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-59fq9"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.030016 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/stf-smoketest-smoke1-gg2cc"]
Jan 21 18:20:16 crc kubenswrapper[4792]: E0121 18:20:16.030736 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90065060-6909-4d1d-9dbd-a0562b9e6a05" containerName="smoketest-collectd"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.030752 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="90065060-6909-4d1d-9dbd-a0562b9e6a05" containerName="smoketest-collectd"
Jan 21 18:20:16 crc kubenswrapper[4792]: E0121 18:20:16.030768 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2559813b-fa2d-493b-92fe-157638dbcba1" containerName="curl"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.030776 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2559813b-fa2d-493b-92fe-157638dbcba1" containerName="curl"
Jan 21 18:20:16 crc kubenswrapper[4792]: E0121 18:20:16.030811 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bddbc18-b172-4795-a2c4-b771c7f5dea1" containerName="registry-server"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.030819 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bddbc18-b172-4795-a2c4-b771c7f5dea1" containerName="registry-server"
Jan 21 18:20:16 crc kubenswrapper[4792]: E0121 18:20:16.030834 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90065060-6909-4d1d-9dbd-a0562b9e6a05" containerName="smoketest-ceilometer"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.030935 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="90065060-6909-4d1d-9dbd-a0562b9e6a05" containerName="smoketest-ceilometer"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.031098 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bddbc18-b172-4795-a2c4-b771c7f5dea1" containerName="registry-server"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.031119 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="90065060-6909-4d1d-9dbd-a0562b9e6a05" containerName="smoketest-ceilometer"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.031132 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="2559813b-fa2d-493b-92fe-157638dbcba1" containerName="curl"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.031143 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="90065060-6909-4d1d-9dbd-a0562b9e6a05" containerName="smoketest-collectd"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.032093 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.035462 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-healthcheck-log"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.035480 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-config"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.035819 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-sensubility-config"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.035904 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-entrypoint-script"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.036002 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-publisher"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.036044 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-entrypoint-script"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.046155 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-gg2cc"]
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.181690 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-gg2cc\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") " pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.181804 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-gg2cc\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") " pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.181867 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxb7f\" (UniqueName: \"kubernetes.io/projected/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-kube-api-access-qxb7f\") pod \"stf-smoketest-smoke1-gg2cc\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") " pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.181916 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-sensubility-config\") pod \"stf-smoketest-smoke1-gg2cc\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") " pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.181959 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-healthcheck-log\") pod \"stf-smoketest-smoke1-gg2cc\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") " pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.182015 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-collectd-config\") pod \"stf-smoketest-smoke1-gg2cc\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") " pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.182100 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-ceilometer-publisher\") pod \"stf-smoketest-smoke1-gg2cc\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") " pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.283476 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-sensubility-config\") pod \"stf-smoketest-smoke1-gg2cc\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") " pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.283547 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-healthcheck-log\") pod \"stf-smoketest-smoke1-gg2cc\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") " pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.283586 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-collectd-config\") pod \"stf-smoketest-smoke1-gg2cc\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") " pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.283630 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-ceilometer-publisher\") pod \"stf-smoketest-smoke1-gg2cc\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") " pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.283655 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-gg2cc\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") " pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.283733 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-gg2cc\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") " pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.283768 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxb7f\" (UniqueName: \"kubernetes.io/projected/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-kube-api-access-qxb7f\") pod \"stf-smoketest-smoke1-gg2cc\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") " pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.284763 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-healthcheck-log\") pod \"stf-smoketest-smoke1-gg2cc\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") " pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.286564 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-sensubility-config\") pod \"stf-smoketest-smoke1-gg2cc\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") " pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.286703 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-gg2cc\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") " pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.286739 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-ceilometer-publisher\") pod \"stf-smoketest-smoke1-gg2cc\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") " pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.287007 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-collectd-config\") pod \"stf-smoketest-smoke1-gg2cc\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") " pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.287351 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-gg2cc\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") " pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.306236 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxb7f\" (UniqueName: \"kubernetes.io/projected/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-kube-api-access-qxb7f\") pod \"stf-smoketest-smoke1-gg2cc\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") " pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.354055 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.569952 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-gg2cc"]
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.810652 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-gg2cc" event={"ID":"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00","Type":"ContainerStarted","Data":"98ced7392f20cc93376be8860c44725c90ad3ed4f1468c40b1f28ff246f7449e"}
Jan 21 18:20:16 crc kubenswrapper[4792]: I0121 18:20:16.810709 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-gg2cc" event={"ID":"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00","Type":"ContainerStarted","Data":"cbbd8769c17a9c9c9c665ec3d3e9ec37bb465f5309b0e458781f3bf5d95cd07b"}
Jan 21 18:20:17 crc kubenswrapper[4792]: I0121 18:20:17.820478 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-gg2cc" event={"ID":"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00","Type":"ContainerStarted","Data":"adfef027f2dd3a4053c5e9bf3ad138f3584304898ea6f6a4ee9c2809869d5050"}
Jan 21 18:20:17 crc kubenswrapper[4792]: I0121 18:20:17.844043 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/stf-smoketest-smoke1-gg2cc" podStartSLOduration=1.844020595 podStartE2EDuration="1.844020595s" podCreationTimestamp="2026-01-21 18:20:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:20:17.839607438 +0000 UTC m=+1451.821570624" watchObservedRunningTime="2026-01-21 18:20:17.844020595 +0000 UTC m=+1451.825983781"
Jan 21 18:20:50 crc kubenswrapper[4792]: I0121 18:20:50.064257 4792 generic.go:334] "Generic (PLEG): container finished" podID="96a633c0-79f4-430e-b0e6-9ae5d7cdcd00" containerID="adfef027f2dd3a4053c5e9bf3ad138f3584304898ea6f6a4ee9c2809869d5050" exitCode=0
Jan 21 18:20:50 crc kubenswrapper[4792]: I0121 18:20:50.064371 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-gg2cc" event={"ID":"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00","Type":"ContainerDied","Data":"adfef027f2dd3a4053c5e9bf3ad138f3584304898ea6f6a4ee9c2809869d5050"}
Jan 21 18:20:50 crc kubenswrapper[4792]: I0121 18:20:50.065899 4792 scope.go:117] "RemoveContainer" containerID="adfef027f2dd3a4053c5e9bf3ad138f3584304898ea6f6a4ee9c2809869d5050"
Jan 21 18:20:51 crc kubenswrapper[4792]: I0121 18:20:51.078353 4792 generic.go:334] "Generic (PLEG): container finished" podID="96a633c0-79f4-430e-b0e6-9ae5d7cdcd00" containerID="98ced7392f20cc93376be8860c44725c90ad3ed4f1468c40b1f28ff246f7449e" exitCode=0
Jan 21 18:20:51 crc kubenswrapper[4792]: I0121 18:20:51.078460 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-gg2cc" event={"ID":"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00","Type":"ContainerDied","Data":"98ced7392f20cc93376be8860c44725c90ad3ed4f1468c40b1f28ff246f7449e"}
Jan 21 18:20:52 crc kubenswrapper[4792]: I0121 18:20:52.360513 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:52 crc kubenswrapper[4792]: I0121 18:20:52.400917 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-collectd-config\") pod \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") "
Jan 21 18:20:52 crc kubenswrapper[4792]: I0121 18:20:52.401018 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-healthcheck-log\") pod \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") "
Jan 21 18:20:52 crc kubenswrapper[4792]: I0121 18:20:52.401059 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-ceilometer-entrypoint-script\") pod \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") "
Jan 21 18:20:52 crc kubenswrapper[4792]: I0121 18:20:52.401128 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-collectd-entrypoint-script\") pod \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") "
Jan 21 18:20:52 crc kubenswrapper[4792]: I0121 18:20:52.401232 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxb7f\" (UniqueName: \"kubernetes.io/projected/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-kube-api-access-qxb7f\") pod \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") "
Jan 21 18:20:52 crc kubenswrapper[4792]: I0121 18:20:52.401315 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-ceilometer-publisher\") pod \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") "
Jan 21 18:20:52 crc kubenswrapper[4792]: I0121 18:20:52.401367 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-sensubility-config\") pod \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\" (UID: \"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00\") "
Jan 21 18:20:52 crc kubenswrapper[4792]: I0121 18:20:52.408905 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-kube-api-access-qxb7f" (OuterVolumeSpecName: "kube-api-access-qxb7f") pod "96a633c0-79f4-430e-b0e6-9ae5d7cdcd00" (UID: "96a633c0-79f4-430e-b0e6-9ae5d7cdcd00"). InnerVolumeSpecName "kube-api-access-qxb7f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 18:20:52 crc kubenswrapper[4792]: I0121 18:20:52.421625 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-ceilometer-entrypoint-script" (OuterVolumeSpecName: "ceilometer-entrypoint-script") pod "96a633c0-79f4-430e-b0e6-9ae5d7cdcd00" (UID: "96a633c0-79f4-430e-b0e6-9ae5d7cdcd00"). InnerVolumeSpecName "ceilometer-entrypoint-script". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 18:20:52 crc kubenswrapper[4792]: I0121 18:20:52.422008 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-sensubility-config" (OuterVolumeSpecName: "sensubility-config") pod "96a633c0-79f4-430e-b0e6-9ae5d7cdcd00" (UID: "96a633c0-79f4-430e-b0e6-9ae5d7cdcd00"). InnerVolumeSpecName "sensubility-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 18:20:52 crc kubenswrapper[4792]: I0121 18:20:52.422556 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-ceilometer-publisher" (OuterVolumeSpecName: "ceilometer-publisher") pod "96a633c0-79f4-430e-b0e6-9ae5d7cdcd00" (UID: "96a633c0-79f4-430e-b0e6-9ae5d7cdcd00"). InnerVolumeSpecName "ceilometer-publisher". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 18:20:52 crc kubenswrapper[4792]: I0121 18:20:52.424281 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-collectd-entrypoint-script" (OuterVolumeSpecName: "collectd-entrypoint-script") pod "96a633c0-79f4-430e-b0e6-9ae5d7cdcd00" (UID: "96a633c0-79f4-430e-b0e6-9ae5d7cdcd00"). InnerVolumeSpecName "collectd-entrypoint-script". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 18:20:52 crc kubenswrapper[4792]: I0121 18:20:52.424344 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-healthcheck-log" (OuterVolumeSpecName: "healthcheck-log") pod "96a633c0-79f4-430e-b0e6-9ae5d7cdcd00" (UID: "96a633c0-79f4-430e-b0e6-9ae5d7cdcd00"). InnerVolumeSpecName "healthcheck-log". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 18:20:52 crc kubenswrapper[4792]: I0121 18:20:52.428970 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-collectd-config" (OuterVolumeSpecName: "collectd-config") pod "96a633c0-79f4-430e-b0e6-9ae5d7cdcd00" (UID: "96a633c0-79f4-430e-b0e6-9ae5d7cdcd00"). InnerVolumeSpecName "collectd-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 18:20:52 crc kubenswrapper[4792]: I0121 18:20:52.504171 4792 reconciler_common.go:293] "Volume detached for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-collectd-config\") on node \"crc\" DevicePath \"\""
Jan 21 18:20:52 crc kubenswrapper[4792]: I0121 18:20:52.504250 4792 reconciler_common.go:293] "Volume detached for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-healthcheck-log\") on node \"crc\" DevicePath \"\""
Jan 21 18:20:52 crc kubenswrapper[4792]: I0121 18:20:52.504265 4792 reconciler_common.go:293] "Volume detached for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-ceilometer-entrypoint-script\") on node \"crc\" DevicePath \"\""
Jan 21 18:20:52 crc kubenswrapper[4792]: I0121 18:20:52.504285 4792 reconciler_common.go:293] "Volume detached for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-collectd-entrypoint-script\") on node \"crc\" DevicePath \"\""
Jan 21 18:20:52 crc kubenswrapper[4792]: I0121 18:20:52.504300 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxb7f\" (UniqueName: \"kubernetes.io/projected/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-kube-api-access-qxb7f\") on node \"crc\" DevicePath \"\""
Jan 21 18:20:52 crc kubenswrapper[4792]: I0121 18:20:52.504312 4792 reconciler_common.go:293] "Volume detached for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-ceilometer-publisher\") on node \"crc\" DevicePath \"\""
Jan 21 18:20:52 crc kubenswrapper[4792]: I0121 18:20:52.504324 4792 reconciler_common.go:293] "Volume detached for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/96a633c0-79f4-430e-b0e6-9ae5d7cdcd00-sensubility-config\") on node \"crc\" DevicePath \"\""
Jan 21 18:20:53 crc kubenswrapper[4792]: I0121 18:20:53.098981 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-gg2cc" event={"ID":"96a633c0-79f4-430e-b0e6-9ae5d7cdcd00","Type":"ContainerDied","Data":"cbbd8769c17a9c9c9c665ec3d3e9ec37bb465f5309b0e458781f3bf5d95cd07b"}
Jan 21 18:20:53 crc kubenswrapper[4792]: I0121 18:20:53.099050 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cbbd8769c17a9c9c9c665ec3d3e9ec37bb465f5309b0e458781f3bf5d95cd07b"
Jan 21 18:20:53 crc kubenswrapper[4792]: I0121 18:20:53.099145 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-gg2cc"
Jan 21 18:20:54 crc kubenswrapper[4792]: I0121 18:20:54.552277 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-59fq9_90065060-6909-4d1d-9dbd-a0562b9e6a05/smoketest-collectd/0.log"
Jan 21 18:20:54 crc kubenswrapper[4792]: I0121 18:20:54.856966 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-59fq9_90065060-6909-4d1d-9dbd-a0562b9e6a05/smoketest-ceilometer/0.log"
Jan 21 18:20:55 crc kubenswrapper[4792]: I0121 18:20:55.137836 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-interconnect-68864d46cb-ncxjb_806a485e-8ece-4663-b323-8f01b038d9aa/default-interconnect/0.log"
Jan 21 18:20:55 crc kubenswrapper[4792]: I0121 18:20:55.419787 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8_5ed633e5-26cf-469c-b1b2-250229e6d602/bridge/2.log"
Jan 21 18:20:55 crc kubenswrapper[4792]: I0121 18:20:55.710268 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8_5ed633e5-26cf-469c-b1b2-250229e6d602/sg-core/0.log"
Jan 21 18:20:55 crc kubenswrapper[4792]: I0121 18:20:55.979349 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn_c9b8bebe-7f04-4c45-9455-60313f44f51e/bridge/2.log"
Jan 21 18:20:56 crc kubenswrapper[4792]: I0121 18:20:56.247980 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn_c9b8bebe-7f04-4c45-9455-60313f44f51e/sg-core/0.log"
Jan 21 18:20:56 crc kubenswrapper[4792]: I0121 18:20:56.564311 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs_3a86979f-5891-42c1-89ce-6b0aefd1af14/bridge/1.log"
Jan 21 18:20:56 crc kubenswrapper[4792]: I0121 18:20:56.829718 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs_3a86979f-5891-42c1-89ce-6b0aefd1af14/sg-core/0.log"
Jan 21 18:20:57 crc kubenswrapper[4792]: I0121 18:20:57.100958 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-f99f584f7-bt694_1007e823-8fe4-4892-9891-3a9ac8df0d23/bridge/2.log"
Jan 21 18:20:57 crc kubenswrapper[4792]: I0121 18:20:57.389997 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-f99f584f7-bt694_1007e823-8fe4-4892-9891-3a9ac8df0d23/sg-core/0.log"
Jan 21 18:20:57 crc kubenswrapper[4792]: I0121 18:20:57.685011 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj_e991533b-8af1-438b-99fb-31ed1b1db4d9/bridge/2.log"
Jan 21 18:20:57 crc kubenswrapper[4792]: I0121 18:20:57.956537 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj_e991533b-8af1-438b-99fb-31ed1b1db4d9/sg-core/0.log"
Jan 21 18:21:00 crc kubenswrapper[4792]: I0121 18:21:00.244402 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-bbbc889bc-ftf54_ed7cf8c1-4937-4143-b230-fdd1c474c3ac/operator/0.log"
Jan 21 18:21:00 crc kubenswrapper[4792]: I0121 18:21:00.504342 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-default-0_6056a47c-877d-43bb-847d-8b48c0156a6a/prometheus/0.log"
Jan 21 18:21:00 crc kubenswrapper[4792]: I0121 18:21:00.772345 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_elasticsearch-es-default-0_ca7f01d9-70c3-477f-81cd-46e32d6fafa1/elasticsearch/0.log"
Jan 21 18:21:01 crc kubenswrapper[4792]: I0121 18:21:01.034424 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-78bcbbdcff-sfzvf_21374324-faf0-4c74-864a-8bf6b2819027/prometheus-webhook-snmp/0.log"
Jan 21 18:21:01 crc kubenswrapper[4792]: I0121 18:21:01.288709 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_alertmanager-default-0_e8fa9d25-e034-4893-839b-70f33ad44b2e/alertmanager/0.log"
Jan 21 18:21:15 crc kubenswrapper[4792]: I0121 18:21:15.745370 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-55b89ddfb9-5bwxr_3132bdf7-accb-4e05-833d-7765614688ea/operator/0.log"
Jan 21 18:21:17 crc kubenswrapper[4792]: I0121 18:21:17.730672 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-bbbc889bc-ftf54_ed7cf8c1-4937-4143-b230-fdd1c474c3ac/operator/0.log"
Jan 21 18:21:18 crc kubenswrapper[4792]: I0121 18:21:18.014572 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_qdr-test_17d54200-f40a-4588-9438-2e2278f193ae/qdr/0.log"
Jan 21 18:21:52 crc kubenswrapper[4792]: I0121 18:21:52.241213 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-59xxx/must-gather-88l9w"]
Jan 21 18:21:52 crc kubenswrapper[4792]: E0121 18:21:52.242172 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96a633c0-79f4-430e-b0e6-9ae5d7cdcd00" containerName="smoketest-ceilometer"
Jan 21 18:21:52 crc kubenswrapper[4792]: I0121 18:21:52.242189 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="96a633c0-79f4-430e-b0e6-9ae5d7cdcd00" containerName="smoketest-ceilometer"
Jan 21 18:21:52 crc kubenswrapper[4792]: E0121 18:21:52.242213 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96a633c0-79f4-430e-b0e6-9ae5d7cdcd00" containerName="smoketest-collectd"
Jan 21 18:21:52 crc kubenswrapper[4792]: I0121 18:21:52.242222 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="96a633c0-79f4-430e-b0e6-9ae5d7cdcd00" containerName="smoketest-collectd"
Jan 21 18:21:52 crc kubenswrapper[4792]: I0121 18:21:52.242369 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="96a633c0-79f4-430e-b0e6-9ae5d7cdcd00" containerName="smoketest-ceilometer"
Jan 21 18:21:52 crc kubenswrapper[4792]: I0121 18:21:52.242392 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="96a633c0-79f4-430e-b0e6-9ae5d7cdcd00" containerName="smoketest-collectd"
Jan 21 18:21:52 crc kubenswrapper[4792]: I0121 18:21:52.243407 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-59xxx/must-gather-88l9w"
Jan 21 18:21:52 crc kubenswrapper[4792]: I0121 18:21:52.245868 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-59xxx"/"openshift-service-ca.crt"
Jan 21 18:21:52 crc kubenswrapper[4792]: I0121 18:21:52.246131 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-59xxx"/"kube-root-ca.crt"
Jan 21 18:21:52 crc kubenswrapper[4792]: I0121 18:21:52.283258 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-59xxx/must-gather-88l9w"]
Jan 21 18:21:52 crc kubenswrapper[4792]: I0121 18:21:52.326061 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8ba396fe-8fb9-426c-8fe7-b470e836bdfc-must-gather-output\") pod \"must-gather-88l9w\" (UID: \"8ba396fe-8fb9-426c-8fe7-b470e836bdfc\") " pod="openshift-must-gather-59xxx/must-gather-88l9w"
Jan 21 18:21:52 crc kubenswrapper[4792]: I0121 18:21:52.326185 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbwpt\" (UniqueName: \"kubernetes.io/projected/8ba396fe-8fb9-426c-8fe7-b470e836bdfc-kube-api-access-fbwpt\") pod \"must-gather-88l9w\" (UID: \"8ba396fe-8fb9-426c-8fe7-b470e836bdfc\") " pod="openshift-must-gather-59xxx/must-gather-88l9w"
Jan 21 18:21:52 crc kubenswrapper[4792]: I0121 18:21:52.427600 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8ba396fe-8fb9-426c-8fe7-b470e836bdfc-must-gather-output\") pod \"must-gather-88l9w\" (UID: \"8ba396fe-8fb9-426c-8fe7-b470e836bdfc\") " pod="openshift-must-gather-59xxx/must-gather-88l9w"
Jan 21 18:21:52 crc kubenswrapper[4792]: I0121 18:21:52.427712 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbwpt\" (UniqueName: \"kubernetes.io/projected/8ba396fe-8fb9-426c-8fe7-b470e836bdfc-kube-api-access-fbwpt\") pod \"must-gather-88l9w\" (UID: \"8ba396fe-8fb9-426c-8fe7-b470e836bdfc\") " pod="openshift-must-gather-59xxx/must-gather-88l9w"
Jan 21 18:21:52 crc kubenswrapper[4792]: I0121 18:21:52.428468 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8ba396fe-8fb9-426c-8fe7-b470e836bdfc-must-gather-output\") pod \"must-gather-88l9w\" (UID: \"8ba396fe-8fb9-426c-8fe7-b470e836bdfc\") " pod="openshift-must-gather-59xxx/must-gather-88l9w"
Jan 21 18:21:52 crc kubenswrapper[4792]: I0121 18:21:52.457390 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbwpt\" (UniqueName: \"kubernetes.io/projected/8ba396fe-8fb9-426c-8fe7-b470e836bdfc-kube-api-access-fbwpt\") pod \"must-gather-88l9w\" (UID: \"8ba396fe-8fb9-426c-8fe7-b470e836bdfc\") " pod="openshift-must-gather-59xxx/must-gather-88l9w"
Jan 21 18:21:52 crc kubenswrapper[4792]: I0121 18:21:52.565188 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-59xxx/must-gather-88l9w"
Jan 21 18:21:53 crc kubenswrapper[4792]: I0121 18:21:53.007752 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-59xxx/must-gather-88l9w"]
Jan 21 18:21:53 crc kubenswrapper[4792]: I0121 18:21:53.016333 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 21 18:21:53 crc kubenswrapper[4792]: I0121 18:21:53.570820 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 21 18:21:53 crc kubenswrapper[4792]: I0121 18:21:53.570913 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 21 18:21:53 crc kubenswrapper[4792]: I0121 18:21:53.583961 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-59xxx/must-gather-88l9w" event={"ID":"8ba396fe-8fb9-426c-8fe7-b470e836bdfc","Type":"ContainerStarted","Data":"7a803b4823bd787263623664bd2ac00c69b5d57e4c5243daf30444b078e3a86f"}
Jan 21 18:22:04 crc kubenswrapper[4792]: I0121 18:22:04.693028 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-59xxx/must-gather-88l9w" event={"ID":"8ba396fe-8fb9-426c-8fe7-b470e836bdfc","Type":"ContainerStarted","Data":"ff0a2286fc9fa38f8dc002b1c895268e05872027b3f2fcc3ed8515d64960132e"}
Jan 21 18:22:04 crc kubenswrapper[4792]: I0121 18:22:04.693639 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-59xxx/must-gather-88l9w" event={"ID":"8ba396fe-8fb9-426c-8fe7-b470e836bdfc","Type":"ContainerStarted","Data":"8639121df5fe48f0822f595077d484641ce33f03154caaca8f9f09acf5f636b2"}
Jan 21 18:22:04 crc kubenswrapper[4792]: I0121 18:22:04.708239 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-59xxx/must-gather-88l9w" podStartSLOduration=2.220772294 podStartE2EDuration="12.708222098s" podCreationTimestamp="2026-01-21 18:21:52 +0000 UTC" firstStartedPulling="2026-01-21 18:21:53.016281588 +0000 UTC m=+1546.998244774" lastFinishedPulling="2026-01-21 18:22:03.503731392 +0000 UTC m=+1557.485694578" observedRunningTime="2026-01-21 18:22:04.707826227 +0000 UTC m=+1558.689789413" watchObservedRunningTime="2026-01-21 18:22:04.708222098 +0000 UTC m=+1558.690185284"
Jan 21 18:22:17 crc kubenswrapper[4792]: I0121 18:22:17.287202 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-r2mgl_4a02b713-1a2c-43d9-9ed2-de57e40a2364/control-plane-machine-set-operator/1.log"
Jan 21 18:22:17 crc kubenswrapper[4792]: I0121 18:22:17.287510 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-r2mgl_4a02b713-1a2c-43d9-9ed2-de57e40a2364/control-plane-machine-set-operator/0.log"
Jan 21 18:22:17 crc kubenswrapper[4792]: I0121 18:22:17.302965 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-zhchl_cb575284-6b4b-49ab-b314-ba85d494ef6c/kube-rbac-proxy/0.log"
path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-zhchl_cb575284-6b4b-49ab-b314-ba85d494ef6c/kube-rbac-proxy/0.log" Jan 21 18:22:17 crc kubenswrapper[4792]: I0121 18:22:17.322512 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-zhchl_cb575284-6b4b-49ab-b314-ba85d494ef6c/machine-api-operator/0.log" Jan 21 18:22:22 crc kubenswrapper[4792]: I0121 18:22:22.711703 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-gct4j_3e2aea4a-8b97-4875-8fc5-b4ad936e1708/cert-manager-controller/0.log" Jan 21 18:22:22 crc kubenswrapper[4792]: I0121 18:22:22.724892 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-twtgw_2ac35a90-e309-483c-8af0-1436820dd8ae/cert-manager-cainjector/0.log" Jan 21 18:22:22 crc kubenswrapper[4792]: I0121 18:22:22.735802 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-bkv49_3abbfb41-60d7-4691-a79f-c57aa65bd54e/cert-manager-webhook/0.log" Jan 21 18:22:23 crc kubenswrapper[4792]: I0121 18:22:23.571097 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:22:23 crc kubenswrapper[4792]: I0121 18:22:23.571446 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:22:27 crc kubenswrapper[4792]: I0121 18:22:27.926775 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-7chrm_83aca1d7-0d0d-48b0-9fcf-e63c7643ff01/prometheus-operator/0.log" Jan 21 18:22:27 crc kubenswrapper[4792]: I0121 18:22:27.945052 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5_ac3337f1-d2aa-4663-8ad8-13a3e1675c57/prometheus-operator-admission-webhook/0.log" Jan 21 18:22:27 crc kubenswrapper[4792]: I0121 18:22:27.958749 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85_cf96d731-c320-48de-9b50-8983d34908e4/prometheus-operator-admission-webhook/0.log" Jan 21 18:22:27 crc kubenswrapper[4792]: I0121 18:22:27.978370 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-c5qnh_52155773-3679-4730-b9e5-8906156cc494/operator/0.log" Jan 21 18:22:28 crc kubenswrapper[4792]: I0121 18:22:28.000507 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-qk94m_86888bdd-6434-4df1-9e87-276f89a48723/perses-operator/0.log" Jan 21 18:22:33 crc kubenswrapper[4792]: I0121 18:22:33.245999 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv_5e215ddb-3fd1-4111-bee0-7758814f6916/extract/0.log" Jan 21 18:22:33 crc kubenswrapper[4792]: I0121 18:22:33.270296 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv_5e215ddb-3fd1-4111-bee0-7758814f6916/util/0.log" Jan 21 18:22:33 crc kubenswrapper[4792]: I0121 18:22:33.331113 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931aq6mfv_5e215ddb-3fd1-4111-bee0-7758814f6916/pull/0.log" Jan 21 18:22:33 crc kubenswrapper[4792]: I0121 18:22:33.414382 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx_2bc48447-ec9a-4a17-b701-687d94cac18d/extract/0.log" Jan 21 18:22:33 crc kubenswrapper[4792]: I0121 18:22:33.428081 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx_2bc48447-ec9a-4a17-b701-687d94cac18d/util/0.log" Jan 21 18:22:33 crc kubenswrapper[4792]: I0121 18:22:33.444356 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxcfnx_2bc48447-ec9a-4a17-b701-687d94cac18d/pull/0.log" Jan 21 18:22:33 crc kubenswrapper[4792]: I0121 18:22:33.465739 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8_aa3174c8-ee84-485c-b192-a75530c69e75/extract/0.log" Jan 21 18:22:33 crc kubenswrapper[4792]: I0121 18:22:33.476514 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8_aa3174c8-ee84-485c-b192-a75530c69e75/util/0.log" Jan 21 18:22:33 crc kubenswrapper[4792]: I0121 18:22:33.489260 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e9z6c8_aa3174c8-ee84-485c-b192-a75530c69e75/pull/0.log" Jan 21 18:22:33 crc kubenswrapper[4792]: I0121 18:22:33.507808 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc_7cce3a91-d520-418a-86cf-68c8c34cdcb4/extract/0.log" Jan 21 18:22:33 crc kubenswrapper[4792]: I0121 18:22:33.516039 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc_7cce3a91-d520-418a-86cf-68c8c34cdcb4/util/0.log" Jan 21 18:22:33 crc kubenswrapper[4792]: I0121 18:22:33.526921 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f0864fsc_7cce3a91-d520-418a-86cf-68c8c34cdcb4/pull/0.log" Jan 21 18:22:33 crc kubenswrapper[4792]: I0121 18:22:33.830383 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9crgd_f8948a28-6967-4016-b98c-643621434b69/registry-server/0.log" Jan 21 18:22:33 crc kubenswrapper[4792]: I0121 18:22:33.837873 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9crgd_f8948a28-6967-4016-b98c-643621434b69/extract-utilities/0.log" Jan 21 18:22:33 crc kubenswrapper[4792]: I0121 18:22:33.849415 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9crgd_f8948a28-6967-4016-b98c-643621434b69/extract-content/0.log" Jan 21 18:22:34 crc kubenswrapper[4792]: I0121 18:22:34.311500 
4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gpzvv_0c48ac10-ac54-4582-9fd8-e739820affc6/registry-server/0.log" Jan 21 18:22:34 crc kubenswrapper[4792]: I0121 18:22:34.318600 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gpzvv_0c48ac10-ac54-4582-9fd8-e739820affc6/extract-utilities/0.log" Jan 21 18:22:34 crc kubenswrapper[4792]: I0121 18:22:34.328954 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gpzvv_0c48ac10-ac54-4582-9fd8-e739820affc6/extract-content/0.log" Jan 21 18:22:34 crc kubenswrapper[4792]: I0121 18:22:34.350467 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-nmngq_d21d6d31-01f6-40c1-817b-cc87735ae2f6/marketplace-operator/0.log" Jan 21 18:22:34 crc kubenswrapper[4792]: I0121 18:22:34.590099 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-z4zsz_1374e517-0337-44da-b942-0c7163208087/registry-server/0.log" Jan 21 18:22:34 crc kubenswrapper[4792]: I0121 18:22:34.596923 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-z4zsz_1374e517-0337-44da-b942-0c7163208087/extract-utilities/0.log" Jan 21 18:22:34 crc kubenswrapper[4792]: I0121 18:22:34.605261 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-z4zsz_1374e517-0337-44da-b942-0c7163208087/extract-content/0.log" Jan 21 18:22:38 crc kubenswrapper[4792]: I0121 18:22:38.983795 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-7chrm_83aca1d7-0d0d-48b0-9fcf-e63c7643ff01/prometheus-operator/0.log" Jan 21 18:22:38 crc kubenswrapper[4792]: I0121 18:22:38.997722 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5_ac3337f1-d2aa-4663-8ad8-13a3e1675c57/prometheus-operator-admission-webhook/0.log" Jan 21 18:22:39 crc kubenswrapper[4792]: I0121 18:22:39.013467 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85_cf96d731-c320-48de-9b50-8983d34908e4/prometheus-operator-admission-webhook/0.log" Jan 21 18:22:39 crc kubenswrapper[4792]: I0121 18:22:39.033777 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-c5qnh_52155773-3679-4730-b9e5-8906156cc494/operator/0.log" Jan 21 18:22:39 crc kubenswrapper[4792]: I0121 18:22:39.047159 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-qk94m_86888bdd-6434-4df1-9e87-276f89a48723/perses-operator/0.log" Jan 21 18:22:49 crc kubenswrapper[4792]: I0121 18:22:49.842290 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-7chrm_83aca1d7-0d0d-48b0-9fcf-e63c7643ff01/prometheus-operator/0.log" Jan 21 18:22:49 crc kubenswrapper[4792]: I0121 18:22:49.867377 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6f579cb64f-8vww5_ac3337f1-d2aa-4663-8ad8-13a3e1675c57/prometheus-operator-admission-webhook/0.log" Jan 21 18:22:49 crc kubenswrapper[4792]: I0121 18:22:49.879989 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6f579cb64f-l5x85_cf96d731-c320-48de-9b50-8983d34908e4/prometheus-operator-admission-webhook/0.log" Jan 21 18:22:49 crc kubenswrapper[4792]: I0121 18:22:49.901310 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-c5qnh_52155773-3679-4730-b9e5-8906156cc494/operator/0.log" Jan 21 18:22:49 crc kubenswrapper[4792]: I0121 18:22:49.914513 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-qk94m_86888bdd-6434-4df1-9e87-276f89a48723/perses-operator/0.log" Jan 21 18:22:50 crc kubenswrapper[4792]: I0121 18:22:50.068166 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-gct4j_3e2aea4a-8b97-4875-8fc5-b4ad936e1708/cert-manager-controller/0.log" Jan 21 18:22:50 crc kubenswrapper[4792]: I0121 18:22:50.084511 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-twtgw_2ac35a90-e309-483c-8af0-1436820dd8ae/cert-manager-cainjector/0.log" Jan 21 18:22:50 crc kubenswrapper[4792]: I0121 18:22:50.098550 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-bkv49_3abbfb41-60d7-4691-a79f-c57aa65bd54e/cert-manager-webhook/0.log" Jan 21 18:22:50 crc kubenswrapper[4792]: I0121 18:22:50.808169 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-gct4j_3e2aea4a-8b97-4875-8fc5-b4ad936e1708/cert-manager-controller/0.log" Jan 21 18:22:50 crc kubenswrapper[4792]: I0121 18:22:50.821117 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-twtgw_2ac35a90-e309-483c-8af0-1436820dd8ae/cert-manager-cainjector/0.log" Jan 21 18:22:50 crc kubenswrapper[4792]: I0121 18:22:50.838303 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-bkv49_3abbfb41-60d7-4691-a79f-c57aa65bd54e/cert-manager-webhook/0.log" Jan 21 18:22:51 crc kubenswrapper[4792]: I0121 18:22:51.333696 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-r2mgl_4a02b713-1a2c-43d9-9ed2-de57e40a2364/control-plane-machine-set-operator/1.log" Jan 21 18:22:51 crc kubenswrapper[4792]: I0121 18:22:51.334141 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-r2mgl_4a02b713-1a2c-43d9-9ed2-de57e40a2364/control-plane-machine-set-operator/0.log" Jan 21 18:22:51 crc kubenswrapper[4792]: I0121 18:22:51.353216 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-zhchl_cb575284-6b4b-49ab-b314-ba85d494ef6c/kube-rbac-proxy/0.log" Jan 21 18:22:51 crc kubenswrapper[4792]: I0121 18:22:51.364009 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-zhchl_cb575284-6b4b-49ab-b314-ba85d494ef6c/machine-api-operator/0.log" Jan 21 18:22:51 crc kubenswrapper[4792]: I0121 18:22:51.893840 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj_84938c18-930b-4348-b2e8-e73ee66d0a08/extract/0.log" Jan 21 18:22:51 crc kubenswrapper[4792]: I0121 18:22:51.901151 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/service-telemetry_27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj_84938c18-930b-4348-b2e8-e73ee66d0a08/util/0.log" Jan 21 18:22:51 crc kubenswrapper[4792]: I0121 18:22:51.909748 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3bx6rbj_84938c18-930b-4348-b2e8-e73ee66d0a08/pull/0.log" Jan 21 18:22:51 crc kubenswrapper[4792]: I0121 18:22:51.924708 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_alertmanager-default-0_e8fa9d25-e034-4893-839b-70f33ad44b2e/alertmanager/0.log" Jan 21 18:22:51 crc kubenswrapper[4792]: I0121 18:22:51.933479 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_alertmanager-default-0_e8fa9d25-e034-4893-839b-70f33ad44b2e/config-reloader/0.log" Jan 21 18:22:51 crc kubenswrapper[4792]: I0121 18:22:51.942481 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_alertmanager-default-0_e8fa9d25-e034-4893-839b-70f33ad44b2e/oauth-proxy/0.log" Jan 21 18:22:51 crc kubenswrapper[4792]: I0121 18:22:51.952452 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_alertmanager-default-0_e8fa9d25-e034-4893-839b-70f33ad44b2e/init-config-reloader/0.log" Jan 21 18:22:51 crc kubenswrapper[4792]: I0121 18:22:51.963796 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm_98302177-ddc2-4102-925e-00c964486bb2/extract/0.log" Jan 21 18:22:51 crc kubenswrapper[4792]: I0121 18:22:51.972119 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm_98302177-ddc2-4102-925e-00c964486bb2/util/0.log" Jan 21 18:22:51 crc kubenswrapper[4792]: I0121 18:22:51.981576 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebhmrlm_98302177-ddc2-4102-925e-00c964486bb2/pull/0.log" Jan 21 18:22:51 crc kubenswrapper[4792]: I0121 18:22:51.994112 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_curl_2559813b-fa2d-493b-92fe-157638dbcba1/curl/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.003598 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-f99f584f7-bt694_1007e823-8fe4-4892-9891-3a9ac8df0d23/bridge/2.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.003903 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-f99f584f7-bt694_1007e823-8fe4-4892-9891-3a9ac8df0d23/bridge/1.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.008456 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-f99f584f7-bt694_1007e823-8fe4-4892-9891-3a9ac8df0d23/sg-core/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.021233 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs_3a86979f-5891-42c1-89ce-6b0aefd1af14/oauth-proxy/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.032771 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs_3a86979f-5891-42c1-89ce-6b0aefd1af14/bridge/1.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.032993 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs_3a86979f-5891-42c1-89ce-6b0aefd1af14/bridge/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.039252 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-b57f974ff-48xxs_3a86979f-5891-42c1-89ce-6b0aefd1af14/sg-core/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.057527 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn_c9b8bebe-7f04-4c45-9455-60313f44f51e/bridge/2.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.058399 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn_c9b8bebe-7f04-4c45-9455-60313f44f51e/bridge/1.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.066514 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-75d8d99c6f-wz9gn_c9b8bebe-7f04-4c45-9455-60313f44f51e/sg-core/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.077523 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8_5ed633e5-26cf-469c-b1b2-250229e6d602/oauth-proxy/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.086598 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8_5ed633e5-26cf-469c-b1b2-250229e6d602/bridge/2.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.087196 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8_5ed633e5-26cf-469c-b1b2-250229e6d602/bridge/1.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.092930 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7996dc9458-clhb8_5ed633e5-26cf-469c-b1b2-250229e6d602/sg-core/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.107471 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj_e991533b-8af1-438b-99fb-31ed1b1db4d9/oauth-proxy/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.115739 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj_e991533b-8af1-438b-99fb-31ed1b1db4d9/bridge/2.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.115914 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj_e991533b-8af1-438b-99fb-31ed1b1db4d9/bridge/1.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.121197 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-6864f4fb65-d9nfj_e991533b-8af1-438b-99fb-31ed1b1db4d9/sg-core/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.140729 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/service-telemetry_default-interconnect-68864d46cb-ncxjb_806a485e-8ece-4663-b323-8f01b038d9aa/default-interconnect/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.148280 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-78bcbbdcff-sfzvf_21374324-faf0-4c74-864a-8bf6b2819027/prometheus-webhook-snmp/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.178915 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_elastic-operator-c6c8576bf-2cs9f_4b21aeae-2f5f-4259-9fc7-185b0bbbd98e/manager/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.205178 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_elasticsearch-es-default-0_ca7f01d9-70c3-477f-81cd-46e32d6fafa1/elasticsearch/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.215388 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_elasticsearch-es-default-0_ca7f01d9-70c3-477f-81cd-46e32d6fafa1/elastic-internal-init-filesystem/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.222368 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_elasticsearch-es-default-0_ca7f01d9-70c3-477f-81cd-46e32d6fafa1/elastic-internal-suspend/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.237590 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_interconnect-operator-5bb49f789d-qtp67_832ffd66-8482-4991-b666-ebc765fc5f8a/interconnect-operator/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.258787 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-default-0_6056a47c-877d-43bb-847d-8b48c0156a6a/prometheus/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.268240 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-default-0_6056a47c-877d-43bb-847d-8b48c0156a6a/config-reloader/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.277685 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-default-0_6056a47c-877d-43bb-847d-8b48c0156a6a/oauth-proxy/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.285890 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-default-0_6056a47c-877d-43bb-847d-8b48c0156a6a/init-config-reloader/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.304322 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_qdr-test_17d54200-f40a-4588-9438-2e2278f193ae/qdr/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.319025 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-framework-index-1-build_fb43b2a1-30ee-488c-b6c5-e43874f03605/docker-build/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.324112 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-framework-index-1-build_fb43b2a1-30ee-488c-b6c5-e43874f03605/git-clone/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.338040 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-framework-index-1-build_fb43b2a1-30ee-488c-b6c5-e43874f03605/manage-dockerfile/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.360400 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/service-telemetry_service-telemetry-framework-operators-dgnfp_3118ce29-fb3c-4dcc-8dc6-29c9f97a7969/registry-server/0.log" Jan 21 18:22:52 crc kubenswrapper[4792]: I0121 18:22:52.577636 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-55b89ddfb9-5bwxr_3132bdf7-accb-4e05-833d-7765614688ea/operator/0.log" Jan 21 18:22:53 crc kubenswrapper[4792]: I0121 18:22:53.570187 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:22:53 crc kubenswrapper[4792]: I0121 18:22:53.570329 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:22:53 crc kubenswrapper[4792]: I0121 18:22:53.570389 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" Jan 21 18:22:53 crc kubenswrapper[4792]: I0121 18:22:53.571151 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586"} pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 18:22:53 crc kubenswrapper[4792]: I0121 18:22:53.571221 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" containerID="cri-o://0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" gracePeriod=600 Jan 21 18:22:53 crc kubenswrapper[4792]: E0121 18:22:53.700549 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:22:54 crc kubenswrapper[4792]: I0121 18:22:54.098981 4792 generic.go:334] "Generic (PLEG): container finished" podID="759f2e21-e44e-4049-b262-cb49448e22ab" containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" exitCode=0 Jan 21 18:22:54 crc kubenswrapper[4792]: I0121 18:22:54.099033 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerDied","Data":"0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586"} Jan 21 18:22:54 crc kubenswrapper[4792]: I0121 18:22:54.099072 4792 scope.go:117] "RemoveContainer" containerID="73909500d6fe38233c9a225249850770aaf4af0c3c0a37a13ceb56094bc310c3" Jan 21 18:22:54 crc kubenswrapper[4792]: I0121 18:22:54.099631 4792 scope.go:117] "RemoveContainer" 
containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:22:54 crc kubenswrapper[4792]: E0121 18:22:54.100012 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:22:54 crc kubenswrapper[4792]: I0121 18:22:54.604341 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-bbbc889bc-ftf54_ed7cf8c1-4937-4143-b230-fdd1c474c3ac/operator/0.log" Jan 21 18:22:54 crc kubenswrapper[4792]: I0121 18:22:54.626814 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fvshc"] Jan 21 18:22:54 crc kubenswrapper[4792]: I0121 18:22:54.628759 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fvshc" Jan 21 18:22:54 crc kubenswrapper[4792]: I0121 18:22:54.636828 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fvshc"] Jan 21 18:22:54 crc kubenswrapper[4792]: I0121 18:22:54.638393 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-59fq9_90065060-6909-4d1d-9dbd-a0562b9e6a05/smoketest-collectd/0.log" Jan 21 18:22:54 crc kubenswrapper[4792]: I0121 18:22:54.645245 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-59fq9_90065060-6909-4d1d-9dbd-a0562b9e6a05/smoketest-ceilometer/0.log" Jan 21 18:22:54 crc kubenswrapper[4792]: I0121 18:22:54.675891 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-gg2cc_96a633c0-79f4-430e-b0e6-9ae5d7cdcd00/smoketest-collectd/0.log" Jan 21 18:22:54 crc kubenswrapper[4792]: I0121 18:22:54.682651 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-gg2cc_96a633c0-79f4-430e-b0e6-9ae5d7cdcd00/smoketest-ceilometer/0.log" Jan 21 18:22:54 crc kubenswrapper[4792]: I0121 18:22:54.802930 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3ec1120-9f15-4a41-b55d-ab3a87ec5c67-catalog-content\") pod \"certified-operators-fvshc\" (UID: \"c3ec1120-9f15-4a41-b55d-ab3a87ec5c67\") " pod="openshift-marketplace/certified-operators-fvshc" Jan 21 18:22:54 crc kubenswrapper[4792]: I0121 18:22:54.803377 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3ec1120-9f15-4a41-b55d-ab3a87ec5c67-utilities\") pod \"certified-operators-fvshc\" (UID: \"c3ec1120-9f15-4a41-b55d-ab3a87ec5c67\") " pod="openshift-marketplace/certified-operators-fvshc" Jan 21 18:22:54 crc kubenswrapper[4792]: I0121 18:22:54.803504 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ln4g6\" (UniqueName: \"kubernetes.io/projected/c3ec1120-9f15-4a41-b55d-ab3a87ec5c67-kube-api-access-ln4g6\") pod \"certified-operators-fvshc\" (UID: \"c3ec1120-9f15-4a41-b55d-ab3a87ec5c67\") " pod="openshift-marketplace/certified-operators-fvshc" Jan 21 18:22:54 
crc kubenswrapper[4792]: I0121 18:22:54.904779 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ln4g6\" (UniqueName: \"kubernetes.io/projected/c3ec1120-9f15-4a41-b55d-ab3a87ec5c67-kube-api-access-ln4g6\") pod \"certified-operators-fvshc\" (UID: \"c3ec1120-9f15-4a41-b55d-ab3a87ec5c67\") " pod="openshift-marketplace/certified-operators-fvshc" Jan 21 18:22:54 crc kubenswrapper[4792]: I0121 18:22:54.904903 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3ec1120-9f15-4a41-b55d-ab3a87ec5c67-catalog-content\") pod \"certified-operators-fvshc\" (UID: \"c3ec1120-9f15-4a41-b55d-ab3a87ec5c67\") " pod="openshift-marketplace/certified-operators-fvshc" Jan 21 18:22:54 crc kubenswrapper[4792]: I0121 18:22:54.905015 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3ec1120-9f15-4a41-b55d-ab3a87ec5c67-utilities\") pod \"certified-operators-fvshc\" (UID: \"c3ec1120-9f15-4a41-b55d-ab3a87ec5c67\") " pod="openshift-marketplace/certified-operators-fvshc" Jan 21 18:22:54 crc kubenswrapper[4792]: I0121 18:22:54.905692 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3ec1120-9f15-4a41-b55d-ab3a87ec5c67-utilities\") pod \"certified-operators-fvshc\" (UID: \"c3ec1120-9f15-4a41-b55d-ab3a87ec5c67\") " pod="openshift-marketplace/certified-operators-fvshc" Jan 21 18:22:54 crc kubenswrapper[4792]: I0121 18:22:54.905696 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3ec1120-9f15-4a41-b55d-ab3a87ec5c67-catalog-content\") pod \"certified-operators-fvshc\" (UID: \"c3ec1120-9f15-4a41-b55d-ab3a87ec5c67\") " pod="openshift-marketplace/certified-operators-fvshc" Jan 21 18:22:54 crc kubenswrapper[4792]: I0121 18:22:54.928754 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ln4g6\" (UniqueName: \"kubernetes.io/projected/c3ec1120-9f15-4a41-b55d-ab3a87ec5c67-kube-api-access-ln4g6\") pod \"certified-operators-fvshc\" (UID: \"c3ec1120-9f15-4a41-b55d-ab3a87ec5c67\") " pod="openshift-marketplace/certified-operators-fvshc" Jan 21 18:22:54 crc kubenswrapper[4792]: I0121 18:22:54.983607 4792 util.go:30] "No sandbox for pod can be found. 
Jan 21 18:22:55 crc kubenswrapper[4792]: I0121 18:22:55.277643 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fvshc"]
Jan 21 18:22:56 crc kubenswrapper[4792]: I0121 18:22:56.139023 4792 generic.go:334] "Generic (PLEG): container finished" podID="c3ec1120-9f15-4a41-b55d-ab3a87ec5c67" containerID="95a215c76898ea8a74e9f10cdd371d1415f85fbc4f61fc648c235560993a8f84" exitCode=0
Jan 21 18:22:56 crc kubenswrapper[4792]: I0121 18:22:56.139077 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvshc" event={"ID":"c3ec1120-9f15-4a41-b55d-ab3a87ec5c67","Type":"ContainerDied","Data":"95a215c76898ea8a74e9f10cdd371d1415f85fbc4f61fc648c235560993a8f84"}
Jan 21 18:22:56 crc kubenswrapper[4792]: I0121 18:22:56.139133 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvshc" event={"ID":"c3ec1120-9f15-4a41-b55d-ab3a87ec5c67","Type":"ContainerStarted","Data":"76062e98944fdf56cf445c0e6069d854295f95dc1be197a7a246ace7a364a521"}
Jan 21 18:22:56 crc kubenswrapper[4792]: I0121 18:22:56.410520 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-6lc6z_d8728e15-00c6-4fa7-a79a-cee551b64c18/kube-multus-additional-cni-plugins/0.log"
Jan 21 18:22:56 crc kubenswrapper[4792]: I0121 18:22:56.423052 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-6lc6z_d8728e15-00c6-4fa7-a79a-cee551b64c18/egress-router-binary-copy/0.log"
Jan 21 18:22:56 crc kubenswrapper[4792]: I0121 18:22:56.433135 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-6lc6z_d8728e15-00c6-4fa7-a79a-cee551b64c18/cni-plugins/0.log"
Jan 21 18:22:56 crc kubenswrapper[4792]: I0121 18:22:56.446983 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-6lc6z_d8728e15-00c6-4fa7-a79a-cee551b64c18/bond-cni-plugin/0.log"
Jan 21 18:22:56 crc kubenswrapper[4792]: I0121 18:22:56.468177 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-6lc6z_d8728e15-00c6-4fa7-a79a-cee551b64c18/routeoverride-cni/0.log"
Jan 21 18:22:56 crc kubenswrapper[4792]: I0121 18:22:56.475003 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-6lc6z_d8728e15-00c6-4fa7-a79a-cee551b64c18/whereabouts-cni-bincopy/0.log"
Jan 21 18:22:56 crc kubenswrapper[4792]: I0121 18:22:56.486178 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-6lc6z_d8728e15-00c6-4fa7-a79a-cee551b64c18/whereabouts-cni/0.log"
Jan 21 18:22:56 crc kubenswrapper[4792]: I0121 18:22:56.508465 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-admission-controller-857f4d67dd-mrvfm_3d05043f-32a0-4b55-ac55-3f48b3c25bc5/multus-admission-controller/0.log"
Jan 21 18:22:56 crc kubenswrapper[4792]: I0121 18:22:56.518481 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-admission-controller-857f4d67dd-mrvfm_3d05043f-32a0-4b55-ac55-3f48b3c25bc5/kube-rbac-proxy/0.log"
Jan 21 18:22:56 crc kubenswrapper[4792]: I0121 18:22:56.548790 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tvdgr_129c7cf1-6a9e-440a-8d4e-049c0652cf6e/kube-multus/3.log"
Jan 21 18:22:56 crc kubenswrapper[4792]: I0121 18:22:56.579208 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tvdgr_129c7cf1-6a9e-440a-8d4e-049c0652cf6e/kube-multus/2.log"
Jan 21 18:22:56 crc kubenswrapper[4792]: I0121 18:22:56.606013 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_network-metrics-daemon-ddsqn_97b1a1f0-3533-44d9-8c10-9feb31d988ea/network-metrics-daemon/0.log"
Jan 21 18:22:56 crc kubenswrapper[4792]: I0121 18:22:56.615271 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_network-metrics-daemon-ddsqn_97b1a1f0-3533-44d9-8c10-9feb31d988ea/kube-rbac-proxy/0.log"
Jan 21 18:22:58 crc kubenswrapper[4792]: I0121 18:22:58.157125 4792 generic.go:334] "Generic (PLEG): container finished" podID="c3ec1120-9f15-4a41-b55d-ab3a87ec5c67" containerID="48f59bd56b952863669187fd9e4637f0c0b21b8566bdb03d5413cd1110206356" exitCode=0
Jan 21 18:22:58 crc kubenswrapper[4792]: I0121 18:22:58.157177 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvshc" event={"ID":"c3ec1120-9f15-4a41-b55d-ab3a87ec5c67","Type":"ContainerDied","Data":"48f59bd56b952863669187fd9e4637f0c0b21b8566bdb03d5413cd1110206356"}
Jan 21 18:22:59 crc kubenswrapper[4792]: I0121 18:22:59.170327 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvshc" event={"ID":"c3ec1120-9f15-4a41-b55d-ab3a87ec5c67","Type":"ContainerStarted","Data":"88d5d4755cacae425574c66b147cd9ffea2aae0ca7222ae429b3b36ec5872192"}
Jan 21 18:22:59 crc kubenswrapper[4792]: I0121 18:22:59.200042 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fvshc" podStartSLOduration=2.7948723859999998 podStartE2EDuration="5.200019235s" podCreationTimestamp="2026-01-21 18:22:54 +0000 UTC" firstStartedPulling="2026-01-21 18:22:56.142071468 +0000 UTC m=+1610.124034654" lastFinishedPulling="2026-01-21 18:22:58.547218317 +0000 UTC m=+1612.529181503" observedRunningTime="2026-01-21 18:22:59.194488868 +0000 UTC m=+1613.176452064" watchObservedRunningTime="2026-01-21 18:22:59.200019235 +0000 UTC m=+1613.181982421"
Jan 21 18:23:04 crc kubenswrapper[4792]: I0121 18:23:04.984405 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fvshc"
Jan 21 18:23:04 crc kubenswrapper[4792]: I0121 18:23:04.985032 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fvshc"
Jan 21 18:23:05 crc kubenswrapper[4792]: I0121 18:23:05.037310 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fvshc"
Jan 21 18:23:05 crc kubenswrapper[4792]: I0121 18:23:05.365291 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fvshc"
Jan 21 18:23:05 crc kubenswrapper[4792]: I0121 18:23:05.428403 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fvshc"]
Jan 21 18:23:07 crc kubenswrapper[4792]: I0121 18:23:07.248281 4792 scope.go:117] "RemoveContainer" containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586"
Jan 21 18:23:07 crc kubenswrapper[4792]: E0121 18:23:07.249559 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:23:07 crc kubenswrapper[4792]: I0121 18:23:07.256977 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fvshc" podUID="c3ec1120-9f15-4a41-b55d-ab3a87ec5c67" containerName="registry-server" containerID="cri-o://88d5d4755cacae425574c66b147cd9ffea2aae0ca7222ae429b3b36ec5872192" gracePeriod=2
Jan 21 18:23:08 crc kubenswrapper[4792]: I0121 18:23:08.809030 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fvshc"
Jan 21 18:23:08 crc kubenswrapper[4792]: I0121 18:23:08.891778 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3ec1120-9f15-4a41-b55d-ab3a87ec5c67-catalog-content\") pod \"c3ec1120-9f15-4a41-b55d-ab3a87ec5c67\" (UID: \"c3ec1120-9f15-4a41-b55d-ab3a87ec5c67\") "
Jan 21 18:23:08 crc kubenswrapper[4792]: I0121 18:23:08.891891 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3ec1120-9f15-4a41-b55d-ab3a87ec5c67-utilities\") pod \"c3ec1120-9f15-4a41-b55d-ab3a87ec5c67\" (UID: \"c3ec1120-9f15-4a41-b55d-ab3a87ec5c67\") "
Jan 21 18:23:08 crc kubenswrapper[4792]: I0121 18:23:08.891938 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ln4g6\" (UniqueName: \"kubernetes.io/projected/c3ec1120-9f15-4a41-b55d-ab3a87ec5c67-kube-api-access-ln4g6\") pod \"c3ec1120-9f15-4a41-b55d-ab3a87ec5c67\" (UID: \"c3ec1120-9f15-4a41-b55d-ab3a87ec5c67\") "
Jan 21 18:23:08 crc kubenswrapper[4792]: I0121 18:23:08.893159 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3ec1120-9f15-4a41-b55d-ab3a87ec5c67-utilities" (OuterVolumeSpecName: "utilities") pod "c3ec1120-9f15-4a41-b55d-ab3a87ec5c67" (UID: "c3ec1120-9f15-4a41-b55d-ab3a87ec5c67"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 21 18:23:08 crc kubenswrapper[4792]: I0121 18:23:08.899704 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3ec1120-9f15-4a41-b55d-ab3a87ec5c67-kube-api-access-ln4g6" (OuterVolumeSpecName: "kube-api-access-ln4g6") pod "c3ec1120-9f15-4a41-b55d-ab3a87ec5c67" (UID: "c3ec1120-9f15-4a41-b55d-ab3a87ec5c67"). InnerVolumeSpecName "kube-api-access-ln4g6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 18:23:08 crc kubenswrapper[4792]: I0121 18:23:08.959905 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3ec1120-9f15-4a41-b55d-ab3a87ec5c67-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c3ec1120-9f15-4a41-b55d-ab3a87ec5c67" (UID: "c3ec1120-9f15-4a41-b55d-ab3a87ec5c67"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:23:08 crc kubenswrapper[4792]: I0121 18:23:08.993554 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3ec1120-9f15-4a41-b55d-ab3a87ec5c67-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:23:08 crc kubenswrapper[4792]: I0121 18:23:08.993610 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ln4g6\" (UniqueName: \"kubernetes.io/projected/c3ec1120-9f15-4a41-b55d-ab3a87ec5c67-kube-api-access-ln4g6\") on node \"crc\" DevicePath \"\"" Jan 21 18:23:08 crc kubenswrapper[4792]: I0121 18:23:08.993625 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3ec1120-9f15-4a41-b55d-ab3a87ec5c67-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:23:09 crc kubenswrapper[4792]: I0121 18:23:09.275809 4792 generic.go:334] "Generic (PLEG): container finished" podID="c3ec1120-9f15-4a41-b55d-ab3a87ec5c67" containerID="88d5d4755cacae425574c66b147cd9ffea2aae0ca7222ae429b3b36ec5872192" exitCode=0 Jan 21 18:23:09 crc kubenswrapper[4792]: I0121 18:23:09.275903 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvshc" event={"ID":"c3ec1120-9f15-4a41-b55d-ab3a87ec5c67","Type":"ContainerDied","Data":"88d5d4755cacae425574c66b147cd9ffea2aae0ca7222ae429b3b36ec5872192"} Jan 21 18:23:09 crc kubenswrapper[4792]: I0121 18:23:09.275957 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvshc" event={"ID":"c3ec1120-9f15-4a41-b55d-ab3a87ec5c67","Type":"ContainerDied","Data":"76062e98944fdf56cf445c0e6069d854295f95dc1be197a7a246ace7a364a521"} Jan 21 18:23:09 crc kubenswrapper[4792]: I0121 18:23:09.275971 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fvshc" Jan 21 18:23:09 crc kubenswrapper[4792]: I0121 18:23:09.275998 4792 scope.go:117] "RemoveContainer" containerID="88d5d4755cacae425574c66b147cd9ffea2aae0ca7222ae429b3b36ec5872192" Jan 21 18:23:09 crc kubenswrapper[4792]: I0121 18:23:09.304587 4792 scope.go:117] "RemoveContainer" containerID="48f59bd56b952863669187fd9e4637f0c0b21b8566bdb03d5413cd1110206356" Jan 21 18:23:09 crc kubenswrapper[4792]: I0121 18:23:09.316346 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fvshc"] Jan 21 18:23:09 crc kubenswrapper[4792]: I0121 18:23:09.324121 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fvshc"] Jan 21 18:23:09 crc kubenswrapper[4792]: I0121 18:23:09.338032 4792 scope.go:117] "RemoveContainer" containerID="95a215c76898ea8a74e9f10cdd371d1415f85fbc4f61fc648c235560993a8f84" Jan 21 18:23:09 crc kubenswrapper[4792]: I0121 18:23:09.392564 4792 scope.go:117] "RemoveContainer" containerID="88d5d4755cacae425574c66b147cd9ffea2aae0ca7222ae429b3b36ec5872192" Jan 21 18:23:09 crc kubenswrapper[4792]: E0121 18:23:09.400508 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88d5d4755cacae425574c66b147cd9ffea2aae0ca7222ae429b3b36ec5872192\": container with ID starting with 88d5d4755cacae425574c66b147cd9ffea2aae0ca7222ae429b3b36ec5872192 not found: ID does not exist" containerID="88d5d4755cacae425574c66b147cd9ffea2aae0ca7222ae429b3b36ec5872192" Jan 21 18:23:09 crc kubenswrapper[4792]: I0121 18:23:09.400565 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88d5d4755cacae425574c66b147cd9ffea2aae0ca7222ae429b3b36ec5872192"} err="failed to get container status \"88d5d4755cacae425574c66b147cd9ffea2aae0ca7222ae429b3b36ec5872192\": rpc error: code = NotFound desc = could not find container \"88d5d4755cacae425574c66b147cd9ffea2aae0ca7222ae429b3b36ec5872192\": container with ID starting with 88d5d4755cacae425574c66b147cd9ffea2aae0ca7222ae429b3b36ec5872192 not found: ID does not exist" Jan 21 18:23:09 crc kubenswrapper[4792]: I0121 18:23:09.400595 4792 scope.go:117] "RemoveContainer" containerID="48f59bd56b952863669187fd9e4637f0c0b21b8566bdb03d5413cd1110206356" Jan 21 18:23:09 crc kubenswrapper[4792]: E0121 18:23:09.401121 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48f59bd56b952863669187fd9e4637f0c0b21b8566bdb03d5413cd1110206356\": container with ID starting with 48f59bd56b952863669187fd9e4637f0c0b21b8566bdb03d5413cd1110206356 not found: ID does not exist" containerID="48f59bd56b952863669187fd9e4637f0c0b21b8566bdb03d5413cd1110206356" Jan 21 18:23:09 crc kubenswrapper[4792]: I0121 18:23:09.401168 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48f59bd56b952863669187fd9e4637f0c0b21b8566bdb03d5413cd1110206356"} err="failed to get container status \"48f59bd56b952863669187fd9e4637f0c0b21b8566bdb03d5413cd1110206356\": rpc error: code = NotFound desc = could not find container \"48f59bd56b952863669187fd9e4637f0c0b21b8566bdb03d5413cd1110206356\": container with ID starting with 48f59bd56b952863669187fd9e4637f0c0b21b8566bdb03d5413cd1110206356 not found: ID does not exist" Jan 21 18:23:09 crc kubenswrapper[4792]: I0121 18:23:09.401223 4792 scope.go:117] "RemoveContainer" 
containerID="95a215c76898ea8a74e9f10cdd371d1415f85fbc4f61fc648c235560993a8f84" Jan 21 18:23:09 crc kubenswrapper[4792]: E0121 18:23:09.401603 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95a215c76898ea8a74e9f10cdd371d1415f85fbc4f61fc648c235560993a8f84\": container with ID starting with 95a215c76898ea8a74e9f10cdd371d1415f85fbc4f61fc648c235560993a8f84 not found: ID does not exist" containerID="95a215c76898ea8a74e9f10cdd371d1415f85fbc4f61fc648c235560993a8f84" Jan 21 18:23:09 crc kubenswrapper[4792]: I0121 18:23:09.401630 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95a215c76898ea8a74e9f10cdd371d1415f85fbc4f61fc648c235560993a8f84"} err="failed to get container status \"95a215c76898ea8a74e9f10cdd371d1415f85fbc4f61fc648c235560993a8f84\": rpc error: code = NotFound desc = could not find container \"95a215c76898ea8a74e9f10cdd371d1415f85fbc4f61fc648c235560993a8f84\": container with ID starting with 95a215c76898ea8a74e9f10cdd371d1415f85fbc4f61fc648c235560993a8f84 not found: ID does not exist" Jan 21 18:23:10 crc kubenswrapper[4792]: I0121 18:23:10.256158 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3ec1120-9f15-4a41-b55d-ab3a87ec5c67" path="/var/lib/kubelet/pods/c3ec1120-9f15-4a41-b55d-ab3a87ec5c67/volumes" Jan 21 18:23:19 crc kubenswrapper[4792]: I0121 18:23:19.247293 4792 scope.go:117] "RemoveContainer" containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:23:19 crc kubenswrapper[4792]: E0121 18:23:19.247512 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:23:34 crc kubenswrapper[4792]: I0121 18:23:34.247160 4792 scope.go:117] "RemoveContainer" containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:23:34 crc kubenswrapper[4792]: E0121 18:23:34.250209 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:23:47 crc kubenswrapper[4792]: I0121 18:23:47.247005 4792 scope.go:117] "RemoveContainer" containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:23:47 crc kubenswrapper[4792]: E0121 18:23:47.247934 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:24:00 crc kubenswrapper[4792]: I0121 18:24:00.246681 4792 scope.go:117] "RemoveContainer" 
containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:24:00 crc kubenswrapper[4792]: E0121 18:24:00.248723 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:24:10 crc kubenswrapper[4792]: I0121 18:24:10.382666 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-86nzm"] Jan 21 18:24:10 crc kubenswrapper[4792]: E0121 18:24:10.383474 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3ec1120-9f15-4a41-b55d-ab3a87ec5c67" containerName="registry-server" Jan 21 18:24:10 crc kubenswrapper[4792]: I0121 18:24:10.383487 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3ec1120-9f15-4a41-b55d-ab3a87ec5c67" containerName="registry-server" Jan 21 18:24:10 crc kubenswrapper[4792]: E0121 18:24:10.383507 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3ec1120-9f15-4a41-b55d-ab3a87ec5c67" containerName="extract-utilities" Jan 21 18:24:10 crc kubenswrapper[4792]: I0121 18:24:10.383514 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3ec1120-9f15-4a41-b55d-ab3a87ec5c67" containerName="extract-utilities" Jan 21 18:24:10 crc kubenswrapper[4792]: E0121 18:24:10.383529 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3ec1120-9f15-4a41-b55d-ab3a87ec5c67" containerName="extract-content" Jan 21 18:24:10 crc kubenswrapper[4792]: I0121 18:24:10.383536 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3ec1120-9f15-4a41-b55d-ab3a87ec5c67" containerName="extract-content" Jan 21 18:24:10 crc kubenswrapper[4792]: I0121 18:24:10.383678 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3ec1120-9f15-4a41-b55d-ab3a87ec5c67" containerName="registry-server" Jan 21 18:24:10 crc kubenswrapper[4792]: I0121 18:24:10.384684 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-86nzm" Jan 21 18:24:10 crc kubenswrapper[4792]: I0121 18:24:10.404441 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-86nzm"] Jan 21 18:24:10 crc kubenswrapper[4792]: I0121 18:24:10.498902 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5ct4\" (UniqueName: \"kubernetes.io/projected/81d20f29-87d7-495c-962d-8bb96cf0d79e-kube-api-access-r5ct4\") pod \"community-operators-86nzm\" (UID: \"81d20f29-87d7-495c-962d-8bb96cf0d79e\") " pod="openshift-marketplace/community-operators-86nzm" Jan 21 18:24:10 crc kubenswrapper[4792]: I0121 18:24:10.499141 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81d20f29-87d7-495c-962d-8bb96cf0d79e-utilities\") pod \"community-operators-86nzm\" (UID: \"81d20f29-87d7-495c-962d-8bb96cf0d79e\") " pod="openshift-marketplace/community-operators-86nzm" Jan 21 18:24:10 crc kubenswrapper[4792]: I0121 18:24:10.499194 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81d20f29-87d7-495c-962d-8bb96cf0d79e-catalog-content\") pod \"community-operators-86nzm\" (UID: \"81d20f29-87d7-495c-962d-8bb96cf0d79e\") " pod="openshift-marketplace/community-operators-86nzm" Jan 21 18:24:10 crc kubenswrapper[4792]: I0121 18:24:10.600449 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5ct4\" (UniqueName: \"kubernetes.io/projected/81d20f29-87d7-495c-962d-8bb96cf0d79e-kube-api-access-r5ct4\") pod \"community-operators-86nzm\" (UID: \"81d20f29-87d7-495c-962d-8bb96cf0d79e\") " pod="openshift-marketplace/community-operators-86nzm" Jan 21 18:24:10 crc kubenswrapper[4792]: I0121 18:24:10.600609 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81d20f29-87d7-495c-962d-8bb96cf0d79e-utilities\") pod \"community-operators-86nzm\" (UID: \"81d20f29-87d7-495c-962d-8bb96cf0d79e\") " pod="openshift-marketplace/community-operators-86nzm" Jan 21 18:24:10 crc kubenswrapper[4792]: I0121 18:24:10.600634 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81d20f29-87d7-495c-962d-8bb96cf0d79e-catalog-content\") pod \"community-operators-86nzm\" (UID: \"81d20f29-87d7-495c-962d-8bb96cf0d79e\") " pod="openshift-marketplace/community-operators-86nzm" Jan 21 18:24:10 crc kubenswrapper[4792]: I0121 18:24:10.601441 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81d20f29-87d7-495c-962d-8bb96cf0d79e-catalog-content\") pod \"community-operators-86nzm\" (UID: \"81d20f29-87d7-495c-962d-8bb96cf0d79e\") " pod="openshift-marketplace/community-operators-86nzm" Jan 21 18:24:10 crc kubenswrapper[4792]: I0121 18:24:10.601667 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81d20f29-87d7-495c-962d-8bb96cf0d79e-utilities\") pod \"community-operators-86nzm\" (UID: \"81d20f29-87d7-495c-962d-8bb96cf0d79e\") " pod="openshift-marketplace/community-operators-86nzm" Jan 21 18:24:10 crc kubenswrapper[4792]: I0121 18:24:10.640882 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-r5ct4\" (UniqueName: \"kubernetes.io/projected/81d20f29-87d7-495c-962d-8bb96cf0d79e-kube-api-access-r5ct4\") pod \"community-operators-86nzm\" (UID: \"81d20f29-87d7-495c-962d-8bb96cf0d79e\") " pod="openshift-marketplace/community-operators-86nzm" Jan 21 18:24:10 crc kubenswrapper[4792]: I0121 18:24:10.717435 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-86nzm" Jan 21 18:24:11 crc kubenswrapper[4792]: I0121 18:24:11.034302 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-86nzm"] Jan 21 18:24:11 crc kubenswrapper[4792]: I0121 18:24:11.800382 4792 generic.go:334] "Generic (PLEG): container finished" podID="81d20f29-87d7-495c-962d-8bb96cf0d79e" containerID="0bbd1067c6ecb4e52f0f6f41d5af3d05af6dea131944d7a3c6c049dfb831e957" exitCode=0 Jan 21 18:24:11 crc kubenswrapper[4792]: I0121 18:24:11.800426 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-86nzm" event={"ID":"81d20f29-87d7-495c-962d-8bb96cf0d79e","Type":"ContainerDied","Data":"0bbd1067c6ecb4e52f0f6f41d5af3d05af6dea131944d7a3c6c049dfb831e957"} Jan 21 18:24:11 crc kubenswrapper[4792]: I0121 18:24:11.800454 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-86nzm" event={"ID":"81d20f29-87d7-495c-962d-8bb96cf0d79e","Type":"ContainerStarted","Data":"08a307c60dcd46c8a9b56a6c0a0c120b6dd122075d5d1e67fae5c10d8664dee8"} Jan 21 18:24:12 crc kubenswrapper[4792]: I0121 18:24:12.816377 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-86nzm" event={"ID":"81d20f29-87d7-495c-962d-8bb96cf0d79e","Type":"ContainerStarted","Data":"35e7373717114a236720b39b0c05b0b438534729a99fe3e9183f9defe8d08ea0"} Jan 21 18:24:13 crc kubenswrapper[4792]: I0121 18:24:13.825401 4792 generic.go:334] "Generic (PLEG): container finished" podID="81d20f29-87d7-495c-962d-8bb96cf0d79e" containerID="35e7373717114a236720b39b0c05b0b438534729a99fe3e9183f9defe8d08ea0" exitCode=0 Jan 21 18:24:13 crc kubenswrapper[4792]: I0121 18:24:13.825447 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-86nzm" event={"ID":"81d20f29-87d7-495c-962d-8bb96cf0d79e","Type":"ContainerDied","Data":"35e7373717114a236720b39b0c05b0b438534729a99fe3e9183f9defe8d08ea0"} Jan 21 18:24:14 crc kubenswrapper[4792]: I0121 18:24:14.834468 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-86nzm" event={"ID":"81d20f29-87d7-495c-962d-8bb96cf0d79e","Type":"ContainerStarted","Data":"ae7691ca392a3125690aa5e6124faeef85e7e9c14954417871eb2a21215ffe23"} Jan 21 18:24:14 crc kubenswrapper[4792]: I0121 18:24:14.860323 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-86nzm" podStartSLOduration=2.402794365 podStartE2EDuration="4.860290715s" podCreationTimestamp="2026-01-21 18:24:10 +0000 UTC" firstStartedPulling="2026-01-21 18:24:11.803109561 +0000 UTC m=+1685.785072757" lastFinishedPulling="2026-01-21 18:24:14.260605921 +0000 UTC m=+1688.242569107" observedRunningTime="2026-01-21 18:24:14.854009144 +0000 UTC m=+1688.835972340" watchObservedRunningTime="2026-01-21 18:24:14.860290715 +0000 UTC m=+1688.842253891" Jan 21 18:24:15 crc kubenswrapper[4792]: I0121 18:24:15.247248 4792 scope.go:117] "RemoveContainer" 
containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:24:15 crc kubenswrapper[4792]: E0121 18:24:15.247533 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:24:20 crc kubenswrapper[4792]: I0121 18:24:20.717804 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-86nzm" Jan 21 18:24:20 crc kubenswrapper[4792]: I0121 18:24:20.717872 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-86nzm" Jan 21 18:24:20 crc kubenswrapper[4792]: I0121 18:24:20.766047 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-86nzm" Jan 21 18:24:20 crc kubenswrapper[4792]: I0121 18:24:20.921565 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-86nzm" Jan 21 18:24:21 crc kubenswrapper[4792]: I0121 18:24:21.005377 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-86nzm"] Jan 21 18:24:22 crc kubenswrapper[4792]: I0121 18:24:22.892230 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-86nzm" podUID="81d20f29-87d7-495c-962d-8bb96cf0d79e" containerName="registry-server" containerID="cri-o://ae7691ca392a3125690aa5e6124faeef85e7e9c14954417871eb2a21215ffe23" gracePeriod=2 Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.274512 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-86nzm" Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.348220 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81d20f29-87d7-495c-962d-8bb96cf0d79e-catalog-content\") pod \"81d20f29-87d7-495c-962d-8bb96cf0d79e\" (UID: \"81d20f29-87d7-495c-962d-8bb96cf0d79e\") " Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.348362 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81d20f29-87d7-495c-962d-8bb96cf0d79e-utilities\") pod \"81d20f29-87d7-495c-962d-8bb96cf0d79e\" (UID: \"81d20f29-87d7-495c-962d-8bb96cf0d79e\") " Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.349330 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81d20f29-87d7-495c-962d-8bb96cf0d79e-utilities" (OuterVolumeSpecName: "utilities") pod "81d20f29-87d7-495c-962d-8bb96cf0d79e" (UID: "81d20f29-87d7-495c-962d-8bb96cf0d79e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.349447 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5ct4\" (UniqueName: \"kubernetes.io/projected/81d20f29-87d7-495c-962d-8bb96cf0d79e-kube-api-access-r5ct4\") pod \"81d20f29-87d7-495c-962d-8bb96cf0d79e\" (UID: \"81d20f29-87d7-495c-962d-8bb96cf0d79e\") " Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.351500 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81d20f29-87d7-495c-962d-8bb96cf0d79e-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.372329 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81d20f29-87d7-495c-962d-8bb96cf0d79e-kube-api-access-r5ct4" (OuterVolumeSpecName: "kube-api-access-r5ct4") pod "81d20f29-87d7-495c-962d-8bb96cf0d79e" (UID: "81d20f29-87d7-495c-962d-8bb96cf0d79e"). InnerVolumeSpecName "kube-api-access-r5ct4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.409030 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81d20f29-87d7-495c-962d-8bb96cf0d79e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "81d20f29-87d7-495c-962d-8bb96cf0d79e" (UID: "81d20f29-87d7-495c-962d-8bb96cf0d79e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.453528 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81d20f29-87d7-495c-962d-8bb96cf0d79e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.453588 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5ct4\" (UniqueName: \"kubernetes.io/projected/81d20f29-87d7-495c-962d-8bb96cf0d79e-kube-api-access-r5ct4\") on node \"crc\" DevicePath \"\"" Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.908619 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-86nzm" event={"ID":"81d20f29-87d7-495c-962d-8bb96cf0d79e","Type":"ContainerDied","Data":"ae7691ca392a3125690aa5e6124faeef85e7e9c14954417871eb2a21215ffe23"} Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.908650 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-86nzm" Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.908708 4792 scope.go:117] "RemoveContainer" containerID="ae7691ca392a3125690aa5e6124faeef85e7e9c14954417871eb2a21215ffe23" Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.908548 4792 generic.go:334] "Generic (PLEG): container finished" podID="81d20f29-87d7-495c-962d-8bb96cf0d79e" containerID="ae7691ca392a3125690aa5e6124faeef85e7e9c14954417871eb2a21215ffe23" exitCode=0 Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.910559 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-86nzm" event={"ID":"81d20f29-87d7-495c-962d-8bb96cf0d79e","Type":"ContainerDied","Data":"08a307c60dcd46c8a9b56a6c0a0c120b6dd122075d5d1e67fae5c10d8664dee8"} Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.947539 4792 scope.go:117] "RemoveContainer" containerID="35e7373717114a236720b39b0c05b0b438534729a99fe3e9183f9defe8d08ea0" Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.952333 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-86nzm"] Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.959328 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-86nzm"] Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.969536 4792 scope.go:117] "RemoveContainer" containerID="0bbd1067c6ecb4e52f0f6f41d5af3d05af6dea131944d7a3c6c049dfb831e957" Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.996409 4792 scope.go:117] "RemoveContainer" containerID="ae7691ca392a3125690aa5e6124faeef85e7e9c14954417871eb2a21215ffe23" Jan 21 18:24:23 crc kubenswrapper[4792]: E0121 18:24:23.996992 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae7691ca392a3125690aa5e6124faeef85e7e9c14954417871eb2a21215ffe23\": container with ID starting with ae7691ca392a3125690aa5e6124faeef85e7e9c14954417871eb2a21215ffe23 not found: ID does not exist" containerID="ae7691ca392a3125690aa5e6124faeef85e7e9c14954417871eb2a21215ffe23" Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.997031 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae7691ca392a3125690aa5e6124faeef85e7e9c14954417871eb2a21215ffe23"} err="failed to get container status \"ae7691ca392a3125690aa5e6124faeef85e7e9c14954417871eb2a21215ffe23\": rpc error: code = NotFound desc = could not find container \"ae7691ca392a3125690aa5e6124faeef85e7e9c14954417871eb2a21215ffe23\": container with ID starting with ae7691ca392a3125690aa5e6124faeef85e7e9c14954417871eb2a21215ffe23 not found: ID does not exist" Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.997063 4792 scope.go:117] "RemoveContainer" containerID="35e7373717114a236720b39b0c05b0b438534729a99fe3e9183f9defe8d08ea0" Jan 21 18:24:23 crc kubenswrapper[4792]: E0121 18:24:23.998989 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35e7373717114a236720b39b0c05b0b438534729a99fe3e9183f9defe8d08ea0\": container with ID starting with 35e7373717114a236720b39b0c05b0b438534729a99fe3e9183f9defe8d08ea0 not found: ID does not exist" containerID="35e7373717114a236720b39b0c05b0b438534729a99fe3e9183f9defe8d08ea0" Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.999023 4792 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"35e7373717114a236720b39b0c05b0b438534729a99fe3e9183f9defe8d08ea0"} err="failed to get container status \"35e7373717114a236720b39b0c05b0b438534729a99fe3e9183f9defe8d08ea0\": rpc error: code = NotFound desc = could not find container \"35e7373717114a236720b39b0c05b0b438534729a99fe3e9183f9defe8d08ea0\": container with ID starting with 35e7373717114a236720b39b0c05b0b438534729a99fe3e9183f9defe8d08ea0 not found: ID does not exist" Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.999045 4792 scope.go:117] "RemoveContainer" containerID="0bbd1067c6ecb4e52f0f6f41d5af3d05af6dea131944d7a3c6c049dfb831e957" Jan 21 18:24:23 crc kubenswrapper[4792]: E0121 18:24:23.999631 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0bbd1067c6ecb4e52f0f6f41d5af3d05af6dea131944d7a3c6c049dfb831e957\": container with ID starting with 0bbd1067c6ecb4e52f0f6f41d5af3d05af6dea131944d7a3c6c049dfb831e957 not found: ID does not exist" containerID="0bbd1067c6ecb4e52f0f6f41d5af3d05af6dea131944d7a3c6c049dfb831e957" Jan 21 18:24:23 crc kubenswrapper[4792]: I0121 18:24:23.999664 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0bbd1067c6ecb4e52f0f6f41d5af3d05af6dea131944d7a3c6c049dfb831e957"} err="failed to get container status \"0bbd1067c6ecb4e52f0f6f41d5af3d05af6dea131944d7a3c6c049dfb831e957\": rpc error: code = NotFound desc = could not find container \"0bbd1067c6ecb4e52f0f6f41d5af3d05af6dea131944d7a3c6c049dfb831e957\": container with ID starting with 0bbd1067c6ecb4e52f0f6f41d5af3d05af6dea131944d7a3c6c049dfb831e957 not found: ID does not exist" Jan 21 18:24:24 crc kubenswrapper[4792]: I0121 18:24:24.255944 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81d20f29-87d7-495c-962d-8bb96cf0d79e" path="/var/lib/kubelet/pods/81d20f29-87d7-495c-962d-8bb96cf0d79e/volumes" Jan 21 18:24:29 crc kubenswrapper[4792]: I0121 18:24:29.247154 4792 scope.go:117] "RemoveContainer" containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:24:29 crc kubenswrapper[4792]: E0121 18:24:29.247679 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:24:32 crc kubenswrapper[4792]: I0121 18:24:32.337257 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-framework-operators-vbh2b"] Jan 21 18:24:32 crc kubenswrapper[4792]: E0121 18:24:32.338297 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81d20f29-87d7-495c-962d-8bb96cf0d79e" containerName="extract-content" Jan 21 18:24:32 crc kubenswrapper[4792]: I0121 18:24:32.338313 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="81d20f29-87d7-495c-962d-8bb96cf0d79e" containerName="extract-content" Jan 21 18:24:32 crc kubenswrapper[4792]: E0121 18:24:32.338337 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81d20f29-87d7-495c-962d-8bb96cf0d79e" containerName="registry-server" Jan 21 18:24:32 crc kubenswrapper[4792]: I0121 18:24:32.338346 4792 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="81d20f29-87d7-495c-962d-8bb96cf0d79e" containerName="registry-server" Jan 21 18:24:32 crc kubenswrapper[4792]: E0121 18:24:32.338358 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81d20f29-87d7-495c-962d-8bb96cf0d79e" containerName="extract-utilities" Jan 21 18:24:32 crc kubenswrapper[4792]: I0121 18:24:32.338365 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="81d20f29-87d7-495c-962d-8bb96cf0d79e" containerName="extract-utilities" Jan 21 18:24:32 crc kubenswrapper[4792]: I0121 18:24:32.338521 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="81d20f29-87d7-495c-962d-8bb96cf0d79e" containerName="registry-server" Jan 21 18:24:32 crc kubenswrapper[4792]: I0121 18:24:32.339116 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-vbh2b" Jan 21 18:24:32 crc kubenswrapper[4792]: I0121 18:24:32.346740 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-vbh2b"] Jan 21 18:24:32 crc kubenswrapper[4792]: I0121 18:24:32.414578 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9ksm\" (UniqueName: \"kubernetes.io/projected/aa8566fe-1a16-48a4-ba87-47a52deb248b-kube-api-access-k9ksm\") pod \"service-telemetry-framework-operators-vbh2b\" (UID: \"aa8566fe-1a16-48a4-ba87-47a52deb248b\") " pod="service-telemetry/service-telemetry-framework-operators-vbh2b" Jan 21 18:24:32 crc kubenswrapper[4792]: I0121 18:24:32.517105 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9ksm\" (UniqueName: \"kubernetes.io/projected/aa8566fe-1a16-48a4-ba87-47a52deb248b-kube-api-access-k9ksm\") pod \"service-telemetry-framework-operators-vbh2b\" (UID: \"aa8566fe-1a16-48a4-ba87-47a52deb248b\") " pod="service-telemetry/service-telemetry-framework-operators-vbh2b" Jan 21 18:24:32 crc kubenswrapper[4792]: I0121 18:24:32.545248 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9ksm\" (UniqueName: \"kubernetes.io/projected/aa8566fe-1a16-48a4-ba87-47a52deb248b-kube-api-access-k9ksm\") pod \"service-telemetry-framework-operators-vbh2b\" (UID: \"aa8566fe-1a16-48a4-ba87-47a52deb248b\") " pod="service-telemetry/service-telemetry-framework-operators-vbh2b" Jan 21 18:24:32 crc kubenswrapper[4792]: I0121 18:24:32.664789 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-vbh2b" Jan 21 18:24:33 crc kubenswrapper[4792]: I0121 18:24:33.118630 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-vbh2b"] Jan 21 18:24:34 crc kubenswrapper[4792]: I0121 18:24:34.011784 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-vbh2b" event={"ID":"aa8566fe-1a16-48a4-ba87-47a52deb248b","Type":"ContainerStarted","Data":"16f62135e95b955d02428d4971de826d65a69ad6d7118d9f0a40ddc363067512"} Jan 21 18:24:34 crc kubenswrapper[4792]: I0121 18:24:34.012194 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-vbh2b" event={"ID":"aa8566fe-1a16-48a4-ba87-47a52deb248b","Type":"ContainerStarted","Data":"2dc3df3a169299690204ad8b7ff8ee9d9d1671d574266478c994635cf0a1dc5d"} Jan 21 18:24:34 crc kubenswrapper[4792]: I0121 18:24:34.030545 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-framework-operators-vbh2b" podStartSLOduration=1.5818876020000001 podStartE2EDuration="2.030522397s" podCreationTimestamp="2026-01-21 18:24:32 +0000 UTC" firstStartedPulling="2026-01-21 18:24:33.128887688 +0000 UTC m=+1707.110850874" lastFinishedPulling="2026-01-21 18:24:33.577522483 +0000 UTC m=+1707.559485669" observedRunningTime="2026-01-21 18:24:34.028731258 +0000 UTC m=+1708.010694444" watchObservedRunningTime="2026-01-21 18:24:34.030522397 +0000 UTC m=+1708.012485583" Jan 21 18:24:42 crc kubenswrapper[4792]: I0121 18:24:42.665351 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/service-telemetry-framework-operators-vbh2b" Jan 21 18:24:42 crc kubenswrapper[4792]: I0121 18:24:42.665974 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/service-telemetry-framework-operators-vbh2b" Jan 21 18:24:42 crc kubenswrapper[4792]: I0121 18:24:42.696945 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/service-telemetry-framework-operators-vbh2b" Jan 21 18:24:43 crc kubenswrapper[4792]: I0121 18:24:43.123326 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/service-telemetry-framework-operators-vbh2b" Jan 21 18:24:43 crc kubenswrapper[4792]: I0121 18:24:43.174526 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-vbh2b"] Jan 21 18:24:43 crc kubenswrapper[4792]: I0121 18:24:43.246790 4792 scope.go:117] "RemoveContainer" containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:24:43 crc kubenswrapper[4792]: E0121 18:24:43.247359 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:24:45 crc kubenswrapper[4792]: I0121 18:24:45.104594 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/service-telemetry-framework-operators-vbh2b" podUID="aa8566fe-1a16-48a4-ba87-47a52deb248b" 
containerName="registry-server" containerID="cri-o://16f62135e95b955d02428d4971de826d65a69ad6d7118d9f0a40ddc363067512" gracePeriod=2 Jan 21 18:24:45 crc kubenswrapper[4792]: I0121 18:24:45.505601 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-vbh2b" Jan 21 18:24:45 crc kubenswrapper[4792]: I0121 18:24:45.647841 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9ksm\" (UniqueName: \"kubernetes.io/projected/aa8566fe-1a16-48a4-ba87-47a52deb248b-kube-api-access-k9ksm\") pod \"aa8566fe-1a16-48a4-ba87-47a52deb248b\" (UID: \"aa8566fe-1a16-48a4-ba87-47a52deb248b\") " Jan 21 18:24:45 crc kubenswrapper[4792]: I0121 18:24:45.653809 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa8566fe-1a16-48a4-ba87-47a52deb248b-kube-api-access-k9ksm" (OuterVolumeSpecName: "kube-api-access-k9ksm") pod "aa8566fe-1a16-48a4-ba87-47a52deb248b" (UID: "aa8566fe-1a16-48a4-ba87-47a52deb248b"). InnerVolumeSpecName "kube-api-access-k9ksm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:24:45 crc kubenswrapper[4792]: I0121 18:24:45.750158 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9ksm\" (UniqueName: \"kubernetes.io/projected/aa8566fe-1a16-48a4-ba87-47a52deb248b-kube-api-access-k9ksm\") on node \"crc\" DevicePath \"\"" Jan 21 18:24:46 crc kubenswrapper[4792]: I0121 18:24:46.113167 4792 generic.go:334] "Generic (PLEG): container finished" podID="aa8566fe-1a16-48a4-ba87-47a52deb248b" containerID="16f62135e95b955d02428d4971de826d65a69ad6d7118d9f0a40ddc363067512" exitCode=0 Jan 21 18:24:46 crc kubenswrapper[4792]: I0121 18:24:46.113233 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-vbh2b" event={"ID":"aa8566fe-1a16-48a4-ba87-47a52deb248b","Type":"ContainerDied","Data":"16f62135e95b955d02428d4971de826d65a69ad6d7118d9f0a40ddc363067512"} Jan 21 18:24:46 crc kubenswrapper[4792]: I0121 18:24:46.113273 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-vbh2b" Jan 21 18:24:46 crc kubenswrapper[4792]: I0121 18:24:46.113299 4792 scope.go:117] "RemoveContainer" containerID="16f62135e95b955d02428d4971de826d65a69ad6d7118d9f0a40ddc363067512" Jan 21 18:24:46 crc kubenswrapper[4792]: I0121 18:24:46.113281 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-vbh2b" event={"ID":"aa8566fe-1a16-48a4-ba87-47a52deb248b","Type":"ContainerDied","Data":"2dc3df3a169299690204ad8b7ff8ee9d9d1671d574266478c994635cf0a1dc5d"} Jan 21 18:24:46 crc kubenswrapper[4792]: I0121 18:24:46.147224 4792 scope.go:117] "RemoveContainer" containerID="16f62135e95b955d02428d4971de826d65a69ad6d7118d9f0a40ddc363067512" Jan 21 18:24:46 crc kubenswrapper[4792]: E0121 18:24:46.147929 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16f62135e95b955d02428d4971de826d65a69ad6d7118d9f0a40ddc363067512\": container with ID starting with 16f62135e95b955d02428d4971de826d65a69ad6d7118d9f0a40ddc363067512 not found: ID does not exist" containerID="16f62135e95b955d02428d4971de826d65a69ad6d7118d9f0a40ddc363067512" Jan 21 18:24:46 crc kubenswrapper[4792]: I0121 18:24:46.147980 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16f62135e95b955d02428d4971de826d65a69ad6d7118d9f0a40ddc363067512"} err="failed to get container status \"16f62135e95b955d02428d4971de826d65a69ad6d7118d9f0a40ddc363067512\": rpc error: code = NotFound desc = could not find container \"16f62135e95b955d02428d4971de826d65a69ad6d7118d9f0a40ddc363067512\": container with ID starting with 16f62135e95b955d02428d4971de826d65a69ad6d7118d9f0a40ddc363067512 not found: ID does not exist" Jan 21 18:24:46 crc kubenswrapper[4792]: I0121 18:24:46.156719 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-vbh2b"] Jan 21 18:24:46 crc kubenswrapper[4792]: I0121 18:24:46.164776 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-vbh2b"] Jan 21 18:24:46 crc kubenswrapper[4792]: I0121 18:24:46.257698 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa8566fe-1a16-48a4-ba87-47a52deb248b" path="/var/lib/kubelet/pods/aa8566fe-1a16-48a4-ba87-47a52deb248b/volumes" Jan 21 18:24:57 crc kubenswrapper[4792]: I0121 18:24:57.247062 4792 scope.go:117] "RemoveContainer" containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:24:57 crc kubenswrapper[4792]: E0121 18:24:57.247837 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:25:09 crc kubenswrapper[4792]: I0121 18:25:09.246347 4792 scope.go:117] "RemoveContainer" containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:25:09 crc kubenswrapper[4792]: E0121 18:25:09.247223 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting 
failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:25:23 crc kubenswrapper[4792]: I0121 18:25:23.247110 4792 scope.go:117] "RemoveContainer" containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:25:23 crc kubenswrapper[4792]: E0121 18:25:23.249138 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:25:34 crc kubenswrapper[4792]: I0121 18:25:34.247150 4792 scope.go:117] "RemoveContainer" containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:25:34 crc kubenswrapper[4792]: E0121 18:25:34.247967 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:25:46 crc kubenswrapper[4792]: I0121 18:25:46.252036 4792 scope.go:117] "RemoveContainer" containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:25:46 crc kubenswrapper[4792]: E0121 18:25:46.253222 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:25:59 crc kubenswrapper[4792]: I0121 18:25:59.247394 4792 scope.go:117] "RemoveContainer" containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:25:59 crc kubenswrapper[4792]: E0121 18:25:59.248594 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:26:10 crc kubenswrapper[4792]: I0121 18:26:10.247790 4792 scope.go:117] "RemoveContainer" containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:26:10 crc kubenswrapper[4792]: E0121 18:26:10.248571 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:26:21 crc kubenswrapper[4792]: I0121 18:26:21.247188 4792 scope.go:117] "RemoveContainer" containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:26:21 crc kubenswrapper[4792]: E0121 18:26:21.247976 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:26:36 crc kubenswrapper[4792]: I0121 18:26:36.252807 4792 scope.go:117] "RemoveContainer" containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:26:36 crc kubenswrapper[4792]: E0121 18:26:36.257037 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:26:51 crc kubenswrapper[4792]: I0121 18:26:51.246596 4792 scope.go:117] "RemoveContainer" containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:26:51 crc kubenswrapper[4792]: E0121 18:26:51.248576 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:27:03 crc kubenswrapper[4792]: I0121 18:27:03.246813 4792 scope.go:117] "RemoveContainer" containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:27:03 crc kubenswrapper[4792]: E0121 18:27:03.247545 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:27:18 crc kubenswrapper[4792]: I0121 18:27:18.246624 4792 scope.go:117] "RemoveContainer" containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:27:18 crc kubenswrapper[4792]: E0121 18:27:18.247401 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:27:30 crc kubenswrapper[4792]: I0121 18:27:30.251815 4792 
scope.go:117] "RemoveContainer" containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:27:30 crc kubenswrapper[4792]: E0121 18:27:30.252635 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:27:45 crc kubenswrapper[4792]: I0121 18:27:45.247032 4792 scope.go:117] "RemoveContainer" containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:27:45 crc kubenswrapper[4792]: E0121 18:27:45.247786 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:27:58 crc kubenswrapper[4792]: I0121 18:27:58.247472 4792 scope.go:117] "RemoveContainer" containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:27:59 crc kubenswrapper[4792]: I0121 18:27:59.412955 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerStarted","Data":"b849121cd81433508cc652f09348b49d5f662acb39062433dc1707671885d0c7"} Jan 21 18:30:00 crc kubenswrapper[4792]: I0121 18:30:00.156569 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483670-mzpmp"] Jan 21 18:30:00 crc kubenswrapper[4792]: E0121 18:30:00.157568 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa8566fe-1a16-48a4-ba87-47a52deb248b" containerName="registry-server" Jan 21 18:30:00 crc kubenswrapper[4792]: I0121 18:30:00.157584 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa8566fe-1a16-48a4-ba87-47a52deb248b" containerName="registry-server" Jan 21 18:30:00 crc kubenswrapper[4792]: I0121 18:30:00.157720 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa8566fe-1a16-48a4-ba87-47a52deb248b" containerName="registry-server" Jan 21 18:30:00 crc kubenswrapper[4792]: I0121 18:30:00.158338 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-mzpmp" Jan 21 18:30:00 crc kubenswrapper[4792]: I0121 18:30:00.162888 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 21 18:30:00 crc kubenswrapper[4792]: I0121 18:30:00.163411 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 21 18:30:00 crc kubenswrapper[4792]: I0121 18:30:00.169390 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483670-mzpmp"] Jan 21 18:30:00 crc kubenswrapper[4792]: I0121 18:30:00.274874 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5-config-volume\") pod \"collect-profiles-29483670-mzpmp\" (UID: \"5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-mzpmp" Jan 21 18:30:00 crc kubenswrapper[4792]: I0121 18:30:00.275245 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5-secret-volume\") pod \"collect-profiles-29483670-mzpmp\" (UID: \"5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-mzpmp" Jan 21 18:30:00 crc kubenswrapper[4792]: I0121 18:30:00.275572 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhngg\" (UniqueName: \"kubernetes.io/projected/5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5-kube-api-access-fhngg\") pod \"collect-profiles-29483670-mzpmp\" (UID: \"5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-mzpmp" Jan 21 18:30:00 crc kubenswrapper[4792]: I0121 18:30:00.377863 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhngg\" (UniqueName: \"kubernetes.io/projected/5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5-kube-api-access-fhngg\") pod \"collect-profiles-29483670-mzpmp\" (UID: \"5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-mzpmp" Jan 21 18:30:00 crc kubenswrapper[4792]: I0121 18:30:00.378299 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5-config-volume\") pod \"collect-profiles-29483670-mzpmp\" (UID: \"5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-mzpmp" Jan 21 18:30:00 crc kubenswrapper[4792]: I0121 18:30:00.378521 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5-secret-volume\") pod \"collect-profiles-29483670-mzpmp\" (UID: \"5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-mzpmp" Jan 21 18:30:00 crc kubenswrapper[4792]: I0121 18:30:00.379289 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5-config-volume\") pod 
\"collect-profiles-29483670-mzpmp\" (UID: \"5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-mzpmp" Jan 21 18:30:00 crc kubenswrapper[4792]: I0121 18:30:00.393891 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5-secret-volume\") pod \"collect-profiles-29483670-mzpmp\" (UID: \"5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-mzpmp" Jan 21 18:30:00 crc kubenswrapper[4792]: I0121 18:30:00.406839 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhngg\" (UniqueName: \"kubernetes.io/projected/5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5-kube-api-access-fhngg\") pod \"collect-profiles-29483670-mzpmp\" (UID: \"5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-mzpmp" Jan 21 18:30:00 crc kubenswrapper[4792]: I0121 18:30:00.483461 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-mzpmp" Jan 21 18:30:00 crc kubenswrapper[4792]: I0121 18:30:00.745459 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483670-mzpmp"] Jan 21 18:30:01 crc kubenswrapper[4792]: I0121 18:30:01.488364 4792 generic.go:334] "Generic (PLEG): container finished" podID="5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5" containerID="efaa6265364aa7be2969f620f02f8d08256e99db1451bd56d0616334631bbe30" exitCode=0 Jan 21 18:30:01 crc kubenswrapper[4792]: I0121 18:30:01.488423 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-mzpmp" event={"ID":"5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5","Type":"ContainerDied","Data":"efaa6265364aa7be2969f620f02f8d08256e99db1451bd56d0616334631bbe30"} Jan 21 18:30:01 crc kubenswrapper[4792]: I0121 18:30:01.488458 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-mzpmp" event={"ID":"5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5","Type":"ContainerStarted","Data":"d1de9a53048afd5f010ce7489fcb5765008ab9233cb6cdc89c3704b57c003f2d"} Jan 21 18:30:02 crc kubenswrapper[4792]: I0121 18:30:02.745292 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-mzpmp" Jan 21 18:30:02 crc kubenswrapper[4792]: I0121 18:30:02.823787 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5-secret-volume\") pod \"5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5\" (UID: \"5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5\") " Jan 21 18:30:02 crc kubenswrapper[4792]: I0121 18:30:02.823911 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5-config-volume\") pod \"5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5\" (UID: \"5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5\") " Jan 21 18:30:02 crc kubenswrapper[4792]: I0121 18:30:02.824026 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhngg\" (UniqueName: \"kubernetes.io/projected/5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5-kube-api-access-fhngg\") pod \"5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5\" (UID: \"5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5\") " Jan 21 18:30:02 crc kubenswrapper[4792]: I0121 18:30:02.826670 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5-config-volume" (OuterVolumeSpecName: "config-volume") pod "5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5" (UID: "5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:30:02 crc kubenswrapper[4792]: I0121 18:30:02.831662 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5" (UID: "5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:30:02 crc kubenswrapper[4792]: I0121 18:30:02.831906 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5-kube-api-access-fhngg" (OuterVolumeSpecName: "kube-api-access-fhngg") pod "5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5" (UID: "5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5"). InnerVolumeSpecName "kube-api-access-fhngg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:30:02 crc kubenswrapper[4792]: I0121 18:30:02.925388 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhngg\" (UniqueName: \"kubernetes.io/projected/5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5-kube-api-access-fhngg\") on node \"crc\" DevicePath \"\"" Jan 21 18:30:02 crc kubenswrapper[4792]: I0121 18:30:02.925431 4792 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 21 18:30:02 crc kubenswrapper[4792]: I0121 18:30:02.925441 4792 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5-config-volume\") on node \"crc\" DevicePath \"\"" Jan 21 18:30:03 crc kubenswrapper[4792]: I0121 18:30:03.504934 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-mzpmp" event={"ID":"5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5","Type":"ContainerDied","Data":"d1de9a53048afd5f010ce7489fcb5765008ab9233cb6cdc89c3704b57c003f2d"} Jan 21 18:30:03 crc kubenswrapper[4792]: I0121 18:30:03.505002 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d1de9a53048afd5f010ce7489fcb5765008ab9233cb6cdc89c3704b57c003f2d" Jan 21 18:30:03 crc kubenswrapper[4792]: I0121 18:30:03.505039 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-mzpmp" Jan 21 18:30:03 crc kubenswrapper[4792]: I0121 18:30:03.820154 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483625-rljlr"] Jan 21 18:30:03 crc kubenswrapper[4792]: I0121 18:30:03.826041 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483625-rljlr"] Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.056934 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-framework-operators-bghbp"] Jan 21 18:30:04 crc kubenswrapper[4792]: E0121 18:30:04.057375 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5" containerName="collect-profiles" Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.057401 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5" containerName="collect-profiles" Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.057590 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b0ffa6c-fee2-4650-83e4-992e3ab0b7f5" containerName="collect-profiles" Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.058383 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-bghbp" Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.068821 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-bghbp"] Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.147538 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v24ph\" (UniqueName: \"kubernetes.io/projected/c905cac2-bf7e-4c97-ba00-cae492ef67dc-kube-api-access-v24ph\") pod \"service-telemetry-framework-operators-bghbp\" (UID: \"c905cac2-bf7e-4c97-ba00-cae492ef67dc\") " pod="service-telemetry/service-telemetry-framework-operators-bghbp" Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.249211 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v24ph\" (UniqueName: \"kubernetes.io/projected/c905cac2-bf7e-4c97-ba00-cae492ef67dc-kube-api-access-v24ph\") pod \"service-telemetry-framework-operators-bghbp\" (UID: \"c905cac2-bf7e-4c97-ba00-cae492ef67dc\") " pod="service-telemetry/service-telemetry-framework-operators-bghbp" Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.262221 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c181f61f-a9d3-4d0e-84d2-aa2bd560017b" path="/var/lib/kubelet/pods/c181f61f-a9d3-4d0e-84d2-aa2bd560017b/volumes" Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.262862 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gcr5d"] Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.264812 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gcr5d" Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.279087 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gcr5d"] Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.281870 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v24ph\" (UniqueName: \"kubernetes.io/projected/c905cac2-bf7e-4c97-ba00-cae492ef67dc-kube-api-access-v24ph\") pod \"service-telemetry-framework-operators-bghbp\" (UID: \"c905cac2-bf7e-4c97-ba00-cae492ef67dc\") " pod="service-telemetry/service-telemetry-framework-operators-bghbp" Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.351228 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/564798d6-b749-4f7e-bc34-4c594fa1b600-utilities\") pod \"redhat-operators-gcr5d\" (UID: \"564798d6-b749-4f7e-bc34-4c594fa1b600\") " pod="openshift-marketplace/redhat-operators-gcr5d" Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.351316 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwzwf\" (UniqueName: \"kubernetes.io/projected/564798d6-b749-4f7e-bc34-4c594fa1b600-kube-api-access-zwzwf\") pod \"redhat-operators-gcr5d\" (UID: \"564798d6-b749-4f7e-bc34-4c594fa1b600\") " pod="openshift-marketplace/redhat-operators-gcr5d" Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.351906 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/564798d6-b749-4f7e-bc34-4c594fa1b600-catalog-content\") pod \"redhat-operators-gcr5d\" (UID: 
\"564798d6-b749-4f7e-bc34-4c594fa1b600\") " pod="openshift-marketplace/redhat-operators-gcr5d" Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.383285 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-bghbp" Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.454052 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/564798d6-b749-4f7e-bc34-4c594fa1b600-utilities\") pod \"redhat-operators-gcr5d\" (UID: \"564798d6-b749-4f7e-bc34-4c594fa1b600\") " pod="openshift-marketplace/redhat-operators-gcr5d" Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.454122 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwzwf\" (UniqueName: \"kubernetes.io/projected/564798d6-b749-4f7e-bc34-4c594fa1b600-kube-api-access-zwzwf\") pod \"redhat-operators-gcr5d\" (UID: \"564798d6-b749-4f7e-bc34-4c594fa1b600\") " pod="openshift-marketplace/redhat-operators-gcr5d" Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.454207 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/564798d6-b749-4f7e-bc34-4c594fa1b600-catalog-content\") pod \"redhat-operators-gcr5d\" (UID: \"564798d6-b749-4f7e-bc34-4c594fa1b600\") " pod="openshift-marketplace/redhat-operators-gcr5d" Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.454771 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/564798d6-b749-4f7e-bc34-4c594fa1b600-utilities\") pod \"redhat-operators-gcr5d\" (UID: \"564798d6-b749-4f7e-bc34-4c594fa1b600\") " pod="openshift-marketplace/redhat-operators-gcr5d" Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.454829 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/564798d6-b749-4f7e-bc34-4c594fa1b600-catalog-content\") pod \"redhat-operators-gcr5d\" (UID: \"564798d6-b749-4f7e-bc34-4c594fa1b600\") " pod="openshift-marketplace/redhat-operators-gcr5d" Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.481237 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwzwf\" (UniqueName: \"kubernetes.io/projected/564798d6-b749-4f7e-bc34-4c594fa1b600-kube-api-access-zwzwf\") pod \"redhat-operators-gcr5d\" (UID: \"564798d6-b749-4f7e-bc34-4c594fa1b600\") " pod="openshift-marketplace/redhat-operators-gcr5d" Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.621108 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gcr5d" Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.842467 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-bghbp"] Jan 21 18:30:04 crc kubenswrapper[4792]: W0121 18:30:04.849136 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc905cac2_bf7e_4c97_ba00_cae492ef67dc.slice/crio-440a20cde226789a51fe6a7baac45a6ab32ea4c2801f64f3b43ce54847df801b WatchSource:0}: Error finding container 440a20cde226789a51fe6a7baac45a6ab32ea4c2801f64f3b43ce54847df801b: Status 404 returned error can't find the container with id 440a20cde226789a51fe6a7baac45a6ab32ea4c2801f64f3b43ce54847df801b Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.852164 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 18:30:04 crc kubenswrapper[4792]: I0121 18:30:04.875741 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gcr5d"] Jan 21 18:30:04 crc kubenswrapper[4792]: W0121 18:30:04.877444 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod564798d6_b749_4f7e_bc34_4c594fa1b600.slice/crio-de07ec34da9cc26c996a897d74b98923e4782a61cca55ab56eda75758bbb35bb WatchSource:0}: Error finding container de07ec34da9cc26c996a897d74b98923e4782a61cca55ab56eda75758bbb35bb: Status 404 returned error can't find the container with id de07ec34da9cc26c996a897d74b98923e4782a61cca55ab56eda75758bbb35bb Jan 21 18:30:05 crc kubenswrapper[4792]: I0121 18:30:05.526555 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-bghbp" event={"ID":"c905cac2-bf7e-4c97-ba00-cae492ef67dc","Type":"ContainerStarted","Data":"440a20cde226789a51fe6a7baac45a6ab32ea4c2801f64f3b43ce54847df801b"} Jan 21 18:30:05 crc kubenswrapper[4792]: I0121 18:30:05.528186 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gcr5d" event={"ID":"564798d6-b749-4f7e-bc34-4c594fa1b600","Type":"ContainerStarted","Data":"de07ec34da9cc26c996a897d74b98923e4782a61cca55ab56eda75758bbb35bb"} Jan 21 18:30:06 crc kubenswrapper[4792]: I0121 18:30:06.546809 4792 generic.go:334] "Generic (PLEG): container finished" podID="564798d6-b749-4f7e-bc34-4c594fa1b600" containerID="900c41cc773b446e4bae7f2756dbbdfdc83457e47506a23b622b9f70bd0a60cd" exitCode=0 Jan 21 18:30:06 crc kubenswrapper[4792]: I0121 18:30:06.547933 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gcr5d" event={"ID":"564798d6-b749-4f7e-bc34-4c594fa1b600","Type":"ContainerDied","Data":"900c41cc773b446e4bae7f2756dbbdfdc83457e47506a23b622b9f70bd0a60cd"} Jan 21 18:30:06 crc kubenswrapper[4792]: I0121 18:30:06.551141 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-bghbp" event={"ID":"c905cac2-bf7e-4c97-ba00-cae492ef67dc","Type":"ContainerStarted","Data":"82788e15592a695e2351a3b8c674750838cbaa4d75e08bfe2ef1d677213a486a"} Jan 21 18:30:08 crc kubenswrapper[4792]: I0121 18:30:08.589199 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gcr5d" 
event={"ID":"564798d6-b749-4f7e-bc34-4c594fa1b600","Type":"ContainerStarted","Data":"891c31f2f643ef51172b04f27d3c1f9eda8830164ca394096adb296896b61a3e"} Jan 21 18:30:08 crc kubenswrapper[4792]: I0121 18:30:08.618538 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-framework-operators-bghbp" podStartSLOduration=3.829080856 podStartE2EDuration="4.618511641s" podCreationTimestamp="2026-01-21 18:30:04 +0000 UTC" firstStartedPulling="2026-01-21 18:30:04.851863568 +0000 UTC m=+2038.833826754" lastFinishedPulling="2026-01-21 18:30:05.641294353 +0000 UTC m=+2039.623257539" observedRunningTime="2026-01-21 18:30:06.595777625 +0000 UTC m=+2040.577740811" watchObservedRunningTime="2026-01-21 18:30:08.618511641 +0000 UTC m=+2042.600474827" Jan 21 18:30:10 crc kubenswrapper[4792]: I0121 18:30:10.608926 4792 generic.go:334] "Generic (PLEG): container finished" podID="564798d6-b749-4f7e-bc34-4c594fa1b600" containerID="891c31f2f643ef51172b04f27d3c1f9eda8830164ca394096adb296896b61a3e" exitCode=0 Jan 21 18:30:10 crc kubenswrapper[4792]: I0121 18:30:10.608985 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gcr5d" event={"ID":"564798d6-b749-4f7e-bc34-4c594fa1b600","Type":"ContainerDied","Data":"891c31f2f643ef51172b04f27d3c1f9eda8830164ca394096adb296896b61a3e"} Jan 21 18:30:11 crc kubenswrapper[4792]: I0121 18:30:11.625316 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gcr5d" event={"ID":"564798d6-b749-4f7e-bc34-4c594fa1b600","Type":"ContainerStarted","Data":"7967d4672396e0155039c8cdb7fb6172b77ea755c6ebaa715b943b8d1a1b201e"} Jan 21 18:30:11 crc kubenswrapper[4792]: I0121 18:30:11.652594 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gcr5d" podStartSLOduration=2.763299779 podStartE2EDuration="7.652565234s" podCreationTimestamp="2026-01-21 18:30:04 +0000 UTC" firstStartedPulling="2026-01-21 18:30:06.550382012 +0000 UTC m=+2040.532345188" lastFinishedPulling="2026-01-21 18:30:11.439647457 +0000 UTC m=+2045.421610643" observedRunningTime="2026-01-21 18:30:11.650404985 +0000 UTC m=+2045.632368171" watchObservedRunningTime="2026-01-21 18:30:11.652565234 +0000 UTC m=+2045.634528420" Jan 21 18:30:14 crc kubenswrapper[4792]: I0121 18:30:14.383812 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/service-telemetry-framework-operators-bghbp" Jan 21 18:30:14 crc kubenswrapper[4792]: I0121 18:30:14.384499 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/service-telemetry-framework-operators-bghbp" Jan 21 18:30:14 crc kubenswrapper[4792]: I0121 18:30:14.436116 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/service-telemetry-framework-operators-bghbp" Jan 21 18:30:14 crc kubenswrapper[4792]: I0121 18:30:14.622911 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gcr5d" Jan 21 18:30:14 crc kubenswrapper[4792]: I0121 18:30:14.622966 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gcr5d" Jan 21 18:30:14 crc kubenswrapper[4792]: I0121 18:30:14.697701 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/service-telemetry-framework-operators-bghbp" Jan 21 18:30:15 crc 
Jan 21 18:30:14 crc kubenswrapper[4792]: I0121 18:30:14.383812 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/service-telemetry-framework-operators-bghbp"
Jan 21 18:30:14 crc kubenswrapper[4792]: I0121 18:30:14.384499 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/service-telemetry-framework-operators-bghbp"
Jan 21 18:30:14 crc kubenswrapper[4792]: I0121 18:30:14.436116 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/service-telemetry-framework-operators-bghbp"
Jan 21 18:30:14 crc kubenswrapper[4792]: I0121 18:30:14.622911 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gcr5d"
Jan 21 18:30:14 crc kubenswrapper[4792]: I0121 18:30:14.622966 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gcr5d"
Jan 21 18:30:14 crc kubenswrapper[4792]: I0121 18:30:14.697701 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/service-telemetry-framework-operators-bghbp"
Jan 21 18:30:15 crc kubenswrapper[4792]: I0121 18:30:15.068377 4792 scope.go:117] "RemoveContainer" containerID="7f565a65bf855b0ebfd5423f864d2872a7d30231d2c8736d4c1bfc616cb3d67b"
Jan 21 18:30:15 crc kubenswrapper[4792]: I0121 18:30:15.677022 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gcr5d" podUID="564798d6-b749-4f7e-bc34-4c594fa1b600" containerName="registry-server" probeResult="failure" output=<
Jan 21 18:30:15 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s
Jan 21 18:30:15 crc kubenswrapper[4792]: >
Jan 21 18:30:18 crc kubenswrapper[4792]: I0121 18:30:18.653063 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-bghbp"]
Jan 21 18:30:18 crc kubenswrapper[4792]: I0121 18:30:18.655311 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/service-telemetry-framework-operators-bghbp" podUID="c905cac2-bf7e-4c97-ba00-cae492ef67dc" containerName="registry-server" containerID="cri-o://82788e15592a695e2351a3b8c674750838cbaa4d75e08bfe2ef1d677213a486a" gracePeriod=2
Jan 21 18:30:19 crc kubenswrapper[4792]: I0121 18:30:19.698271 4792 generic.go:334] "Generic (PLEG): container finished" podID="c905cac2-bf7e-4c97-ba00-cae492ef67dc" containerID="82788e15592a695e2351a3b8c674750838cbaa4d75e08bfe2ef1d677213a486a" exitCode=0
Jan 21 18:30:19 crc kubenswrapper[4792]: I0121 18:30:19.698386 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-bghbp" event={"ID":"c905cac2-bf7e-4c97-ba00-cae492ef67dc","Type":"ContainerDied","Data":"82788e15592a695e2351a3b8c674750838cbaa4d75e08bfe2ef1d677213a486a"}
Jan 21 18:30:20 crc kubenswrapper[4792]: I0121 18:30:20.354425 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-bghbp"
Jan 21 18:30:20 crc kubenswrapper[4792]: I0121 18:30:20.405643 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v24ph\" (UniqueName: \"kubernetes.io/projected/c905cac2-bf7e-4c97-ba00-cae492ef67dc-kube-api-access-v24ph\") pod \"c905cac2-bf7e-4c97-ba00-cae492ef67dc\" (UID: \"c905cac2-bf7e-4c97-ba00-cae492ef67dc\") "
Jan 21 18:30:20 crc kubenswrapper[4792]: I0121 18:30:20.412671 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c905cac2-bf7e-4c97-ba00-cae492ef67dc-kube-api-access-v24ph" (OuterVolumeSpecName: "kube-api-access-v24ph") pod "c905cac2-bf7e-4c97-ba00-cae492ef67dc" (UID: "c905cac2-bf7e-4c97-ba00-cae492ef67dc"). InnerVolumeSpecName "kube-api-access-v24ph". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 18:30:20 crc kubenswrapper[4792]: I0121 18:30:20.507450 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v24ph\" (UniqueName: \"kubernetes.io/projected/c905cac2-bf7e-4c97-ba00-cae492ef67dc-kube-api-access-v24ph\") on node \"crc\" DevicePath \"\""
Jan 21 18:30:20 crc kubenswrapper[4792]: I0121 18:30:20.709834 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-bghbp" event={"ID":"c905cac2-bf7e-4c97-ba00-cae492ef67dc","Type":"ContainerDied","Data":"440a20cde226789a51fe6a7baac45a6ab32ea4c2801f64f3b43ce54847df801b"}
Jan 21 18:30:20 crc kubenswrapper[4792]: I0121 18:30:20.709941 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-bghbp"
Jan 21 18:30:20 crc kubenswrapper[4792]: I0121 18:30:20.709973 4792 scope.go:117] "RemoveContainer" containerID="82788e15592a695e2351a3b8c674750838cbaa4d75e08bfe2ef1d677213a486a"
Jan 21 18:30:20 crc kubenswrapper[4792]: I0121 18:30:20.741225 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-bghbp"]
Jan 21 18:30:20 crc kubenswrapper[4792]: I0121 18:30:20.746609 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-bghbp"]
Jan 21 18:30:22 crc kubenswrapper[4792]: I0121 18:30:22.258634 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c905cac2-bf7e-4c97-ba00-cae492ef67dc" path="/var/lib/kubelet/pods/c905cac2-bf7e-4c97-ba00-cae492ef67dc/volumes"
Jan 21 18:30:23 crc kubenswrapper[4792]: I0121 18:30:23.570579 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 21 18:30:23 crc kubenswrapper[4792]: I0121 18:30:23.570634 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 21 18:30:24 crc kubenswrapper[4792]: I0121 18:30:24.672760 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gcr5d"
Jan 21 18:30:24 crc kubenswrapper[4792]: I0121 18:30:24.722112 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gcr5d"
Jan 21 18:30:24 crc kubenswrapper[4792]: I0121 18:30:24.917038 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gcr5d"]
Jan 21 18:30:25 crc kubenswrapper[4792]: I0121 18:30:25.743066 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gcr5d" podUID="564798d6-b749-4f7e-bc34-4c594fa1b600" containerName="registry-server" containerID="cri-o://7967d4672396e0155039c8cdb7fb6172b77ea755c6ebaa715b943b8d1a1b201e" gracePeriod=2
Jan 21 18:30:26 crc kubenswrapper[4792]: I0121 18:30:26.754680 4792 generic.go:334] "Generic (PLEG): container finished" podID="564798d6-b749-4f7e-bc34-4c594fa1b600" containerID="7967d4672396e0155039c8cdb7fb6172b77ea755c6ebaa715b943b8d1a1b201e" exitCode=0
Jan 21 18:30:26 crc kubenswrapper[4792]: I0121 18:30:26.754744 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gcr5d" event={"ID":"564798d6-b749-4f7e-bc34-4c594fa1b600","Type":"ContainerDied","Data":"7967d4672396e0155039c8cdb7fb6172b77ea755c6ebaa715b943b8d1a1b201e"}
Jan 21 18:30:26 crc kubenswrapper[4792]: I0121 18:30:26.821221 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gcr5d"
Jan 21 18:30:26 crc kubenswrapper[4792]: I0121 18:30:26.995824 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/564798d6-b749-4f7e-bc34-4c594fa1b600-utilities\") pod \"564798d6-b749-4f7e-bc34-4c594fa1b600\" (UID: \"564798d6-b749-4f7e-bc34-4c594fa1b600\") "
Jan 21 18:30:26 crc kubenswrapper[4792]: I0121 18:30:26.995934 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/564798d6-b749-4f7e-bc34-4c594fa1b600-catalog-content\") pod \"564798d6-b749-4f7e-bc34-4c594fa1b600\" (UID: \"564798d6-b749-4f7e-bc34-4c594fa1b600\") "
Jan 21 18:30:26 crc kubenswrapper[4792]: I0121 18:30:26.995959 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwzwf\" (UniqueName: \"kubernetes.io/projected/564798d6-b749-4f7e-bc34-4c594fa1b600-kube-api-access-zwzwf\") pod \"564798d6-b749-4f7e-bc34-4c594fa1b600\" (UID: \"564798d6-b749-4f7e-bc34-4c594fa1b600\") "
Jan 21 18:30:26 crc kubenswrapper[4792]: I0121 18:30:26.997937 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/564798d6-b749-4f7e-bc34-4c594fa1b600-utilities" (OuterVolumeSpecName: "utilities") pod "564798d6-b749-4f7e-bc34-4c594fa1b600" (UID: "564798d6-b749-4f7e-bc34-4c594fa1b600"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 21 18:30:27 crc kubenswrapper[4792]: I0121 18:30:27.009299 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/564798d6-b749-4f7e-bc34-4c594fa1b600-kube-api-access-zwzwf" (OuterVolumeSpecName: "kube-api-access-zwzwf") pod "564798d6-b749-4f7e-bc34-4c594fa1b600" (UID: "564798d6-b749-4f7e-bc34-4c594fa1b600"). InnerVolumeSpecName "kube-api-access-zwzwf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 18:30:27 crc kubenswrapper[4792]: I0121 18:30:27.097213 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/564798d6-b749-4f7e-bc34-4c594fa1b600-utilities\") on node \"crc\" DevicePath \"\""
Jan 21 18:30:27 crc kubenswrapper[4792]: I0121 18:30:27.097249 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwzwf\" (UniqueName: \"kubernetes.io/projected/564798d6-b749-4f7e-bc34-4c594fa1b600-kube-api-access-zwzwf\") on node \"crc\" DevicePath \"\""
Jan 21 18:30:27 crc kubenswrapper[4792]: I0121 18:30:27.140598 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/564798d6-b749-4f7e-bc34-4c594fa1b600-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "564798d6-b749-4f7e-bc34-4c594fa1b600" (UID: "564798d6-b749-4f7e-bc34-4c594fa1b600"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 21 18:30:27 crc kubenswrapper[4792]: I0121 18:30:27.198368 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/564798d6-b749-4f7e-bc34-4c594fa1b600-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 21 18:30:27 crc kubenswrapper[4792]: I0121 18:30:27.766965 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gcr5d" event={"ID":"564798d6-b749-4f7e-bc34-4c594fa1b600","Type":"ContainerDied","Data":"de07ec34da9cc26c996a897d74b98923e4782a61cca55ab56eda75758bbb35bb"}
Jan 21 18:30:27 crc kubenswrapper[4792]: I0121 18:30:27.767043 4792 scope.go:117] "RemoveContainer" containerID="7967d4672396e0155039c8cdb7fb6172b77ea755c6ebaa715b943b8d1a1b201e"
Jan 21 18:30:27 crc kubenswrapper[4792]: I0121 18:30:27.767240 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gcr5d"
Jan 21 18:30:27 crc kubenswrapper[4792]: I0121 18:30:27.804197 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gcr5d"]
Jan 21 18:30:27 crc kubenswrapper[4792]: I0121 18:30:27.810456 4792 scope.go:117] "RemoveContainer" containerID="891c31f2f643ef51172b04f27d3c1f9eda8830164ca394096adb296896b61a3e"
Jan 21 18:30:27 crc kubenswrapper[4792]: I0121 18:30:27.813662 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gcr5d"]
Jan 21 18:30:27 crc kubenswrapper[4792]: I0121 18:30:27.839151 4792 scope.go:117] "RemoveContainer" containerID="900c41cc773b446e4bae7f2756dbbdfdc83457e47506a23b622b9f70bd0a60cd"
Jan 21 18:30:28 crc kubenswrapper[4792]: I0121 18:30:28.254698 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="564798d6-b749-4f7e-bc34-4c594fa1b600" path="/var/lib/kubelet/pods/564798d6-b749-4f7e-bc34-4c594fa1b600/volumes"
Jan 21 18:30:53 crc kubenswrapper[4792]: I0121 18:30:53.571315 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 21 18:30:53 crc kubenswrapper[4792]: I0121 18:30:53.574392 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 21 18:31:23 crc kubenswrapper[4792]: I0121 18:31:23.571107 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 21 18:31:23 crc kubenswrapper[4792]: I0121 18:31:23.571741 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 21 18:31:23 crc kubenswrapper[4792]: I0121 18:31:23.571795 4792 kubelet.go:2542]
"SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" Jan 21 18:31:23 crc kubenswrapper[4792]: I0121 18:31:23.572731 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b849121cd81433508cc652f09348b49d5f662acb39062433dc1707671885d0c7"} pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 18:31:23 crc kubenswrapper[4792]: I0121 18:31:23.572798 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" containerID="cri-o://b849121cd81433508cc652f09348b49d5f662acb39062433dc1707671885d0c7" gracePeriod=600 Jan 21 18:31:24 crc kubenswrapper[4792]: I0121 18:31:24.258464 4792 generic.go:334] "Generic (PLEG): container finished" podID="759f2e21-e44e-4049-b262-cb49448e22ab" containerID="b849121cd81433508cc652f09348b49d5f662acb39062433dc1707671885d0c7" exitCode=0 Jan 21 18:31:24 crc kubenswrapper[4792]: I0121 18:31:24.258495 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerDied","Data":"b849121cd81433508cc652f09348b49d5f662acb39062433dc1707671885d0c7"} Jan 21 18:31:24 crc kubenswrapper[4792]: I0121 18:31:24.259063 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerStarted","Data":"c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4"} Jan 21 18:31:24 crc kubenswrapper[4792]: I0121 18:31:24.259085 4792 scope.go:117] "RemoveContainer" containerID="0c409c12ed2a1354aca0f11c7abce1878a05ae41b80162947c651f75c5e15586" Jan 21 18:33:23 crc kubenswrapper[4792]: I0121 18:33:23.570393 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:33:23 crc kubenswrapper[4792]: I0121 18:33:23.570956 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:33:27 crc kubenswrapper[4792]: I0121 18:33:27.429347 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wg72g"] Jan 21 18:33:27 crc kubenswrapper[4792]: E0121 18:33:27.430017 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="564798d6-b749-4f7e-bc34-4c594fa1b600" containerName="registry-server" Jan 21 18:33:27 crc kubenswrapper[4792]: I0121 18:33:27.430032 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="564798d6-b749-4f7e-bc34-4c594fa1b600" containerName="registry-server" Jan 21 18:33:27 crc kubenswrapper[4792]: E0121 18:33:27.430050 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c905cac2-bf7e-4c97-ba00-cae492ef67dc" 
containerName="registry-server" Jan 21 18:33:27 crc kubenswrapper[4792]: I0121 18:33:27.430058 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c905cac2-bf7e-4c97-ba00-cae492ef67dc" containerName="registry-server" Jan 21 18:33:27 crc kubenswrapper[4792]: E0121 18:33:27.430071 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="564798d6-b749-4f7e-bc34-4c594fa1b600" containerName="extract-content" Jan 21 18:33:27 crc kubenswrapper[4792]: I0121 18:33:27.430080 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="564798d6-b749-4f7e-bc34-4c594fa1b600" containerName="extract-content" Jan 21 18:33:27 crc kubenswrapper[4792]: E0121 18:33:27.430092 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="564798d6-b749-4f7e-bc34-4c594fa1b600" containerName="extract-utilities" Jan 21 18:33:27 crc kubenswrapper[4792]: I0121 18:33:27.430100 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="564798d6-b749-4f7e-bc34-4c594fa1b600" containerName="extract-utilities" Jan 21 18:33:27 crc kubenswrapper[4792]: I0121 18:33:27.430266 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="564798d6-b749-4f7e-bc34-4c594fa1b600" containerName="registry-server" Jan 21 18:33:27 crc kubenswrapper[4792]: I0121 18:33:27.430297 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c905cac2-bf7e-4c97-ba00-cae492ef67dc" containerName="registry-server" Jan 21 18:33:27 crc kubenswrapper[4792]: I0121 18:33:27.431533 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wg72g" Jan 21 18:33:27 crc kubenswrapper[4792]: I0121 18:33:27.443833 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea522cca-99df-4ea2-8276-5602d2920f09-catalog-content\") pod \"certified-operators-wg72g\" (UID: \"ea522cca-99df-4ea2-8276-5602d2920f09\") " pod="openshift-marketplace/certified-operators-wg72g" Jan 21 18:33:27 crc kubenswrapper[4792]: I0121 18:33:27.443949 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea522cca-99df-4ea2-8276-5602d2920f09-utilities\") pod \"certified-operators-wg72g\" (UID: \"ea522cca-99df-4ea2-8276-5602d2920f09\") " pod="openshift-marketplace/certified-operators-wg72g" Jan 21 18:33:27 crc kubenswrapper[4792]: I0121 18:33:27.443993 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfzm5\" (UniqueName: \"kubernetes.io/projected/ea522cca-99df-4ea2-8276-5602d2920f09-kube-api-access-lfzm5\") pod \"certified-operators-wg72g\" (UID: \"ea522cca-99df-4ea2-8276-5602d2920f09\") " pod="openshift-marketplace/certified-operators-wg72g" Jan 21 18:33:27 crc kubenswrapper[4792]: I0121 18:33:27.448074 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wg72g"] Jan 21 18:33:27 crc kubenswrapper[4792]: I0121 18:33:27.545254 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea522cca-99df-4ea2-8276-5602d2920f09-catalog-content\") pod \"certified-operators-wg72g\" (UID: \"ea522cca-99df-4ea2-8276-5602d2920f09\") " pod="openshift-marketplace/certified-operators-wg72g" Jan 21 18:33:27 crc kubenswrapper[4792]: I0121 18:33:27.545334 4792 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea522cca-99df-4ea2-8276-5602d2920f09-utilities\") pod \"certified-operators-wg72g\" (UID: \"ea522cca-99df-4ea2-8276-5602d2920f09\") " pod="openshift-marketplace/certified-operators-wg72g" Jan 21 18:33:27 crc kubenswrapper[4792]: I0121 18:33:27.545371 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfzm5\" (UniqueName: \"kubernetes.io/projected/ea522cca-99df-4ea2-8276-5602d2920f09-kube-api-access-lfzm5\") pod \"certified-operators-wg72g\" (UID: \"ea522cca-99df-4ea2-8276-5602d2920f09\") " pod="openshift-marketplace/certified-operators-wg72g" Jan 21 18:33:27 crc kubenswrapper[4792]: I0121 18:33:27.545794 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea522cca-99df-4ea2-8276-5602d2920f09-catalog-content\") pod \"certified-operators-wg72g\" (UID: \"ea522cca-99df-4ea2-8276-5602d2920f09\") " pod="openshift-marketplace/certified-operators-wg72g" Jan 21 18:33:27 crc kubenswrapper[4792]: I0121 18:33:27.545841 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea522cca-99df-4ea2-8276-5602d2920f09-utilities\") pod \"certified-operators-wg72g\" (UID: \"ea522cca-99df-4ea2-8276-5602d2920f09\") " pod="openshift-marketplace/certified-operators-wg72g" Jan 21 18:33:27 crc kubenswrapper[4792]: I0121 18:33:27.570360 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfzm5\" (UniqueName: \"kubernetes.io/projected/ea522cca-99df-4ea2-8276-5602d2920f09-kube-api-access-lfzm5\") pod \"certified-operators-wg72g\" (UID: \"ea522cca-99df-4ea2-8276-5602d2920f09\") " pod="openshift-marketplace/certified-operators-wg72g" Jan 21 18:33:27 crc kubenswrapper[4792]: I0121 18:33:27.781207 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wg72g" Jan 21 18:33:28 crc kubenswrapper[4792]: I0121 18:33:28.085547 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wg72g"] Jan 21 18:33:28 crc kubenswrapper[4792]: I0121 18:33:28.333895 4792 generic.go:334] "Generic (PLEG): container finished" podID="ea522cca-99df-4ea2-8276-5602d2920f09" containerID="28aef938339bb156319e970821acca10d3efa8c36a7461a4f5a3c275edcedef1" exitCode=0 Jan 21 18:33:28 crc kubenswrapper[4792]: I0121 18:33:28.333954 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wg72g" event={"ID":"ea522cca-99df-4ea2-8276-5602d2920f09","Type":"ContainerDied","Data":"28aef938339bb156319e970821acca10d3efa8c36a7461a4f5a3c275edcedef1"} Jan 21 18:33:28 crc kubenswrapper[4792]: I0121 18:33:28.334230 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wg72g" event={"ID":"ea522cca-99df-4ea2-8276-5602d2920f09","Type":"ContainerStarted","Data":"04aed683e2e09ae647084dc9196823c27315a74e2f2b2fde62a7f05db5920ea0"} Jan 21 18:33:32 crc kubenswrapper[4792]: I0121 18:33:32.383979 4792 generic.go:334] "Generic (PLEG): container finished" podID="ea522cca-99df-4ea2-8276-5602d2920f09" containerID="d8c0cb5d2a5895d717b4e62facda08d442d7dd43cc97cf6fd417423319084393" exitCode=0 Jan 21 18:33:32 crc kubenswrapper[4792]: I0121 18:33:32.384645 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wg72g" event={"ID":"ea522cca-99df-4ea2-8276-5602d2920f09","Type":"ContainerDied","Data":"d8c0cb5d2a5895d717b4e62facda08d442d7dd43cc97cf6fd417423319084393"} Jan 21 18:33:33 crc kubenswrapper[4792]: I0121 18:33:33.395198 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wg72g" event={"ID":"ea522cca-99df-4ea2-8276-5602d2920f09","Type":"ContainerStarted","Data":"0f54b9c89783475f53d11430c90bc8fb60f3fca72929c2352c5090c8186aef86"} Jan 21 18:33:33 crc kubenswrapper[4792]: I0121 18:33:33.419283 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wg72g" podStartSLOduration=1.9678854380000002 podStartE2EDuration="6.419261735s" podCreationTimestamp="2026-01-21 18:33:27 +0000 UTC" firstStartedPulling="2026-01-21 18:33:28.335466921 +0000 UTC m=+2242.317430107" lastFinishedPulling="2026-01-21 18:33:32.786843228 +0000 UTC m=+2246.768806404" observedRunningTime="2026-01-21 18:33:33.414875235 +0000 UTC m=+2247.396838441" watchObservedRunningTime="2026-01-21 18:33:33.419261735 +0000 UTC m=+2247.401224931" Jan 21 18:33:37 crc kubenswrapper[4792]: I0121 18:33:37.782255 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wg72g" Jan 21 18:33:37 crc kubenswrapper[4792]: I0121 18:33:37.782592 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wg72g" Jan 21 18:33:37 crc kubenswrapper[4792]: I0121 18:33:37.822758 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wg72g" Jan 21 18:33:38 crc kubenswrapper[4792]: I0121 18:33:38.475667 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wg72g" Jan 21 18:33:38 crc kubenswrapper[4792]: I0121 18:33:38.548873 4792 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wg72g"] Jan 21 18:33:38 crc kubenswrapper[4792]: I0121 18:33:38.582408 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9crgd"] Jan 21 18:33:38 crc kubenswrapper[4792]: I0121 18:33:38.582721 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9crgd" podUID="f8948a28-6967-4016-b98c-643621434b69" containerName="registry-server" containerID="cri-o://3662f31ad5b7e08c8a520f730a925f67eec209455f73d7458909a295b37bddbd" gracePeriod=2 Jan 21 18:33:39 crc kubenswrapper[4792]: I0121 18:33:39.446059 4792 generic.go:334] "Generic (PLEG): container finished" podID="f8948a28-6967-4016-b98c-643621434b69" containerID="3662f31ad5b7e08c8a520f730a925f67eec209455f73d7458909a295b37bddbd" exitCode=0 Jan 21 18:33:39 crc kubenswrapper[4792]: I0121 18:33:39.446240 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9crgd" event={"ID":"f8948a28-6967-4016-b98c-643621434b69","Type":"ContainerDied","Data":"3662f31ad5b7e08c8a520f730a925f67eec209455f73d7458909a295b37bddbd"} Jan 21 18:33:39 crc kubenswrapper[4792]: I0121 18:33:39.527162 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9crgd" Jan 21 18:33:39 crc kubenswrapper[4792]: I0121 18:33:39.564716 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9gkkv\" (UniqueName: \"kubernetes.io/projected/f8948a28-6967-4016-b98c-643621434b69-kube-api-access-9gkkv\") pod \"f8948a28-6967-4016-b98c-643621434b69\" (UID: \"f8948a28-6967-4016-b98c-643621434b69\") " Jan 21 18:33:39 crc kubenswrapper[4792]: I0121 18:33:39.564865 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8948a28-6967-4016-b98c-643621434b69-catalog-content\") pod \"f8948a28-6967-4016-b98c-643621434b69\" (UID: \"f8948a28-6967-4016-b98c-643621434b69\") " Jan 21 18:33:39 crc kubenswrapper[4792]: I0121 18:33:39.564947 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8948a28-6967-4016-b98c-643621434b69-utilities\") pod \"f8948a28-6967-4016-b98c-643621434b69\" (UID: \"f8948a28-6967-4016-b98c-643621434b69\") " Jan 21 18:33:39 crc kubenswrapper[4792]: I0121 18:33:39.566032 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8948a28-6967-4016-b98c-643621434b69-utilities" (OuterVolumeSpecName: "utilities") pod "f8948a28-6967-4016-b98c-643621434b69" (UID: "f8948a28-6967-4016-b98c-643621434b69"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:33:39 crc kubenswrapper[4792]: I0121 18:33:39.572274 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8948a28-6967-4016-b98c-643621434b69-kube-api-access-9gkkv" (OuterVolumeSpecName: "kube-api-access-9gkkv") pod "f8948a28-6967-4016-b98c-643621434b69" (UID: "f8948a28-6967-4016-b98c-643621434b69"). InnerVolumeSpecName "kube-api-access-9gkkv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:33:39 crc kubenswrapper[4792]: I0121 18:33:39.644233 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8948a28-6967-4016-b98c-643621434b69-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f8948a28-6967-4016-b98c-643621434b69" (UID: "f8948a28-6967-4016-b98c-643621434b69"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:33:39 crc kubenswrapper[4792]: I0121 18:33:39.667374 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9gkkv\" (UniqueName: \"kubernetes.io/projected/f8948a28-6967-4016-b98c-643621434b69-kube-api-access-9gkkv\") on node \"crc\" DevicePath \"\"" Jan 21 18:33:39 crc kubenswrapper[4792]: I0121 18:33:39.667423 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8948a28-6967-4016-b98c-643621434b69-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:33:39 crc kubenswrapper[4792]: I0121 18:33:39.667438 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8948a28-6967-4016-b98c-643621434b69-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:33:40 crc kubenswrapper[4792]: I0121 18:33:40.458295 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9crgd" Jan 21 18:33:40 crc kubenswrapper[4792]: I0121 18:33:40.459129 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9crgd" event={"ID":"f8948a28-6967-4016-b98c-643621434b69","Type":"ContainerDied","Data":"9be69fcba5a58cd54d23ceb83907f1a93232ed6905be08ac8b1a1c143b54edb9"} Jan 21 18:33:40 crc kubenswrapper[4792]: I0121 18:33:40.459171 4792 scope.go:117] "RemoveContainer" containerID="3662f31ad5b7e08c8a520f730a925f67eec209455f73d7458909a295b37bddbd" Jan 21 18:33:40 crc kubenswrapper[4792]: I0121 18:33:40.488337 4792 scope.go:117] "RemoveContainer" containerID="0f872b3f86899d603aff9862aa38241167f04e9818a87f895558ca5c8f01ec1b" Jan 21 18:33:40 crc kubenswrapper[4792]: I0121 18:33:40.505186 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9crgd"] Jan 21 18:33:40 crc kubenswrapper[4792]: I0121 18:33:40.512001 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9crgd"] Jan 21 18:33:40 crc kubenswrapper[4792]: I0121 18:33:40.523996 4792 scope.go:117] "RemoveContainer" containerID="c720b10d4c87713c45b218e99b3cfc85a4704d16c838418015135dd78ed32ade" Jan 21 18:33:42 crc kubenswrapper[4792]: I0121 18:33:42.255606 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8948a28-6967-4016-b98c-643621434b69" path="/var/lib/kubelet/pods/f8948a28-6967-4016-b98c-643621434b69/volumes" Jan 21 18:33:53 crc kubenswrapper[4792]: I0121 18:33:53.571015 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:33:53 crc kubenswrapper[4792]: I0121 18:33:53.571553 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:34:13 crc kubenswrapper[4792]: I0121 18:34:13.085150 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fnphm"] Jan 21 18:34:13 crc kubenswrapper[4792]: E0121 18:34:13.086108 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8948a28-6967-4016-b98c-643621434b69" containerName="extract-content" Jan 21 18:34:13 crc kubenswrapper[4792]: I0121 18:34:13.086126 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8948a28-6967-4016-b98c-643621434b69" containerName="extract-content" Jan 21 18:34:13 crc kubenswrapper[4792]: E0121 18:34:13.086144 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8948a28-6967-4016-b98c-643621434b69" containerName="registry-server" Jan 21 18:34:13 crc kubenswrapper[4792]: I0121 18:34:13.086153 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8948a28-6967-4016-b98c-643621434b69" containerName="registry-server" Jan 21 18:34:13 crc kubenswrapper[4792]: E0121 18:34:13.086167 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8948a28-6967-4016-b98c-643621434b69" containerName="extract-utilities" Jan 21 18:34:13 crc kubenswrapper[4792]: I0121 18:34:13.086176 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8948a28-6967-4016-b98c-643621434b69" containerName="extract-utilities" Jan 21 18:34:13 crc kubenswrapper[4792]: I0121 18:34:13.086338 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8948a28-6967-4016-b98c-643621434b69" containerName="registry-server" Jan 21 18:34:13 crc kubenswrapper[4792]: I0121 18:34:13.087519 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fnphm" Jan 21 18:34:13 crc kubenswrapper[4792]: I0121 18:34:13.096971 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fnphm"] Jan 21 18:34:13 crc kubenswrapper[4792]: I0121 18:34:13.256385 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wb69\" (UniqueName: \"kubernetes.io/projected/b03d79e2-0577-4377-b9be-6707507fb23e-kube-api-access-6wb69\") pod \"community-operators-fnphm\" (UID: \"b03d79e2-0577-4377-b9be-6707507fb23e\") " pod="openshift-marketplace/community-operators-fnphm" Jan 21 18:34:13 crc kubenswrapper[4792]: I0121 18:34:13.256556 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b03d79e2-0577-4377-b9be-6707507fb23e-catalog-content\") pod \"community-operators-fnphm\" (UID: \"b03d79e2-0577-4377-b9be-6707507fb23e\") " pod="openshift-marketplace/community-operators-fnphm" Jan 21 18:34:13 crc kubenswrapper[4792]: I0121 18:34:13.256689 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b03d79e2-0577-4377-b9be-6707507fb23e-utilities\") pod \"community-operators-fnphm\" (UID: \"b03d79e2-0577-4377-b9be-6707507fb23e\") " pod="openshift-marketplace/community-operators-fnphm" Jan 21 18:34:13 crc kubenswrapper[4792]: I0121 18:34:13.358498 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b03d79e2-0577-4377-b9be-6707507fb23e-catalog-content\") pod \"community-operators-fnphm\" (UID: \"b03d79e2-0577-4377-b9be-6707507fb23e\") " pod="openshift-marketplace/community-operators-fnphm" Jan 21 18:34:13 crc kubenswrapper[4792]: I0121 18:34:13.358595 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b03d79e2-0577-4377-b9be-6707507fb23e-utilities\") pod \"community-operators-fnphm\" (UID: \"b03d79e2-0577-4377-b9be-6707507fb23e\") " pod="openshift-marketplace/community-operators-fnphm" Jan 21 18:34:13 crc kubenswrapper[4792]: I0121 18:34:13.358654 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wb69\" (UniqueName: \"kubernetes.io/projected/b03d79e2-0577-4377-b9be-6707507fb23e-kube-api-access-6wb69\") pod \"community-operators-fnphm\" (UID: \"b03d79e2-0577-4377-b9be-6707507fb23e\") " pod="openshift-marketplace/community-operators-fnphm" Jan 21 18:34:13 crc kubenswrapper[4792]: I0121 18:34:13.359304 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b03d79e2-0577-4377-b9be-6707507fb23e-catalog-content\") pod \"community-operators-fnphm\" (UID: \"b03d79e2-0577-4377-b9be-6707507fb23e\") " pod="openshift-marketplace/community-operators-fnphm" Jan 21 18:34:13 crc kubenswrapper[4792]: I0121 18:34:13.359354 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b03d79e2-0577-4377-b9be-6707507fb23e-utilities\") pod \"community-operators-fnphm\" (UID: \"b03d79e2-0577-4377-b9be-6707507fb23e\") " pod="openshift-marketplace/community-operators-fnphm" Jan 21 18:34:13 crc kubenswrapper[4792]: I0121 18:34:13.383043 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-6wb69\" (UniqueName: \"kubernetes.io/projected/b03d79e2-0577-4377-b9be-6707507fb23e-kube-api-access-6wb69\") pod \"community-operators-fnphm\" (UID: \"b03d79e2-0577-4377-b9be-6707507fb23e\") " pod="openshift-marketplace/community-operators-fnphm" Jan 21 18:34:13 crc kubenswrapper[4792]: I0121 18:34:13.413151 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fnphm" Jan 21 18:34:13 crc kubenswrapper[4792]: I0121 18:34:13.791569 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fnphm"] Jan 21 18:34:13 crc kubenswrapper[4792]: W0121 18:34:13.805006 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb03d79e2_0577_4377_b9be_6707507fb23e.slice/crio-e4306ecacc1a13919317fc286f9dfe604d3da6c4d89da20cddcf9f71163ac844 WatchSource:0}: Error finding container e4306ecacc1a13919317fc286f9dfe604d3da6c4d89da20cddcf9f71163ac844: Status 404 returned error can't find the container with id e4306ecacc1a13919317fc286f9dfe604d3da6c4d89da20cddcf9f71163ac844 Jan 21 18:34:14 crc kubenswrapper[4792]: I0121 18:34:14.716912 4792 generic.go:334] "Generic (PLEG): container finished" podID="b03d79e2-0577-4377-b9be-6707507fb23e" containerID="4fdf97b10f270889a2e2a0f4da8e74ca4e1b8f4e6bc16b804fdc0d2e589f9678" exitCode=0 Jan 21 18:34:14 crc kubenswrapper[4792]: I0121 18:34:14.717010 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fnphm" event={"ID":"b03d79e2-0577-4377-b9be-6707507fb23e","Type":"ContainerDied","Data":"4fdf97b10f270889a2e2a0f4da8e74ca4e1b8f4e6bc16b804fdc0d2e589f9678"} Jan 21 18:34:14 crc kubenswrapper[4792]: I0121 18:34:14.717256 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fnphm" event={"ID":"b03d79e2-0577-4377-b9be-6707507fb23e","Type":"ContainerStarted","Data":"e4306ecacc1a13919317fc286f9dfe604d3da6c4d89da20cddcf9f71163ac844"} Jan 21 18:34:15 crc kubenswrapper[4792]: I0121 18:34:15.728341 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fnphm" event={"ID":"b03d79e2-0577-4377-b9be-6707507fb23e","Type":"ContainerStarted","Data":"2b098290a8fd0c896e944f83e729f8e14910906b5b9b1a39cfd0d4dc85a66cef"} Jan 21 18:34:16 crc kubenswrapper[4792]: I0121 18:34:16.740791 4792 generic.go:334] "Generic (PLEG): container finished" podID="b03d79e2-0577-4377-b9be-6707507fb23e" containerID="2b098290a8fd0c896e944f83e729f8e14910906b5b9b1a39cfd0d4dc85a66cef" exitCode=0 Jan 21 18:34:16 crc kubenswrapper[4792]: I0121 18:34:16.740865 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fnphm" event={"ID":"b03d79e2-0577-4377-b9be-6707507fb23e","Type":"ContainerDied","Data":"2b098290a8fd0c896e944f83e729f8e14910906b5b9b1a39cfd0d4dc85a66cef"} Jan 21 18:34:17 crc kubenswrapper[4792]: I0121 18:34:17.756397 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fnphm" event={"ID":"b03d79e2-0577-4377-b9be-6707507fb23e","Type":"ContainerStarted","Data":"f00124fbbcb9b98f72f72183f4d57b232a6f4b52d7998ed5f2500e62288cc2f4"} Jan 21 18:34:17 crc kubenswrapper[4792]: I0121 18:34:17.786509 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fnphm" 
podStartSLOduration=2.375186163 podStartE2EDuration="4.786485642s" podCreationTimestamp="2026-01-21 18:34:13 +0000 UTC" firstStartedPulling="2026-01-21 18:34:14.720265478 +0000 UTC m=+2288.702228664" lastFinishedPulling="2026-01-21 18:34:17.131564957 +0000 UTC m=+2291.113528143" observedRunningTime="2026-01-21 18:34:17.780611811 +0000 UTC m=+2291.762575007" watchObservedRunningTime="2026-01-21 18:34:17.786485642 +0000 UTC m=+2291.768448848" Jan 21 18:34:23 crc kubenswrapper[4792]: I0121 18:34:23.414438 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fnphm" Jan 21 18:34:23 crc kubenswrapper[4792]: I0121 18:34:23.417575 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fnphm" Jan 21 18:34:23 crc kubenswrapper[4792]: I0121 18:34:23.460323 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fnphm" Jan 21 18:34:23 crc kubenswrapper[4792]: I0121 18:34:23.570879 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:34:23 crc kubenswrapper[4792]: I0121 18:34:23.571306 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:34:23 crc kubenswrapper[4792]: I0121 18:34:23.571444 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" Jan 21 18:34:23 crc kubenswrapper[4792]: I0121 18:34:23.572325 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4"} pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 18:34:23 crc kubenswrapper[4792]: I0121 18:34:23.572465 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" containerID="cri-o://c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" gracePeriod=600 Jan 21 18:34:23 crc kubenswrapper[4792]: I0121 18:34:23.845341 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fnphm" Jan 21 18:34:23 crc kubenswrapper[4792]: I0121 18:34:23.908564 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fnphm"] Jan 21 18:34:24 crc kubenswrapper[4792]: E0121 18:34:24.195108 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" 
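
The "Observed pod startup duration" record above carries its own arithmetic: podStartSLOduration is evidently the end-to-end startup time with the image-pull window excluded, and the figures in the record bear that out (the timestamps are taken from the record itself; the decomposition is an inference from them):

    image-pull window = lastFinishedPulling - firstStartedPulling
                      = 18:34:17.131564957 - 18:34:14.720265478 = 2.411299479 s
    end-to-end        = watchObservedRunningTime - podCreationTimestamp
                      = 18:34:17.786485642 - 18:34:13.000000000 = 4.786485642 s
    SLO duration      = 4.786485642 - 2.411299479 = 2.375186163 s

Both results match the record's podStartE2EDuration="4.786485642s" and podStartSLOduration=2.375186163 exactly.
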
pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:34:24 crc kubenswrapper[4792]: I0121 18:34:24.811069 4792 generic.go:334] "Generic (PLEG): container finished" podID="759f2e21-e44e-4049-b262-cb49448e22ab" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" exitCode=0 Jan 21 18:34:24 crc kubenswrapper[4792]: I0121 18:34:24.811178 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerDied","Data":"c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4"} Jan 21 18:34:24 crc kubenswrapper[4792]: I0121 18:34:24.812302 4792 scope.go:117] "RemoveContainer" containerID="b849121cd81433508cc652f09348b49d5f662acb39062433dc1707671885d0c7" Jan 21 18:34:24 crc kubenswrapper[4792]: I0121 18:34:24.812909 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:34:24 crc kubenswrapper[4792]: E0121 18:34:24.813187 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:34:25 crc kubenswrapper[4792]: I0121 18:34:25.824272 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fnphm" podUID="b03d79e2-0577-4377-b9be-6707507fb23e" containerName="registry-server" containerID="cri-o://f00124fbbcb9b98f72f72183f4d57b232a6f4b52d7998ed5f2500e62288cc2f4" gracePeriod=2 Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.276702 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fnphm" Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.371486 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b03d79e2-0577-4377-b9be-6707507fb23e-catalog-content\") pod \"b03d79e2-0577-4377-b9be-6707507fb23e\" (UID: \"b03d79e2-0577-4377-b9be-6707507fb23e\") " Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.371629 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wb69\" (UniqueName: \"kubernetes.io/projected/b03d79e2-0577-4377-b9be-6707507fb23e-kube-api-access-6wb69\") pod \"b03d79e2-0577-4377-b9be-6707507fb23e\" (UID: \"b03d79e2-0577-4377-b9be-6707507fb23e\") " Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.371690 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b03d79e2-0577-4377-b9be-6707507fb23e-utilities\") pod \"b03d79e2-0577-4377-b9be-6707507fb23e\" (UID: \"b03d79e2-0577-4377-b9be-6707507fb23e\") " Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.373531 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b03d79e2-0577-4377-b9be-6707507fb23e-utilities" (OuterVolumeSpecName: "utilities") pod "b03d79e2-0577-4377-b9be-6707507fb23e" (UID: "b03d79e2-0577-4377-b9be-6707507fb23e"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.381012 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b03d79e2-0577-4377-b9be-6707507fb23e-kube-api-access-6wb69" (OuterVolumeSpecName: "kube-api-access-6wb69") pod "b03d79e2-0577-4377-b9be-6707507fb23e" (UID: "b03d79e2-0577-4377-b9be-6707507fb23e"). InnerVolumeSpecName "kube-api-access-6wb69". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.433174 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b03d79e2-0577-4377-b9be-6707507fb23e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b03d79e2-0577-4377-b9be-6707507fb23e" (UID: "b03d79e2-0577-4377-b9be-6707507fb23e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.474239 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b03d79e2-0577-4377-b9be-6707507fb23e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.474319 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wb69\" (UniqueName: \"kubernetes.io/projected/b03d79e2-0577-4377-b9be-6707507fb23e-kube-api-access-6wb69\") on node \"crc\" DevicePath \"\"" Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.474392 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b03d79e2-0577-4377-b9be-6707507fb23e-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.833313 4792 generic.go:334] "Generic (PLEG): container finished" podID="b03d79e2-0577-4377-b9be-6707507fb23e" containerID="f00124fbbcb9b98f72f72183f4d57b232a6f4b52d7998ed5f2500e62288cc2f4" exitCode=0 Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.833356 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fnphm" event={"ID":"b03d79e2-0577-4377-b9be-6707507fb23e","Type":"ContainerDied","Data":"f00124fbbcb9b98f72f72183f4d57b232a6f4b52d7998ed5f2500e62288cc2f4"} Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.833373 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fnphm" Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.833393 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fnphm" event={"ID":"b03d79e2-0577-4377-b9be-6707507fb23e","Type":"ContainerDied","Data":"e4306ecacc1a13919317fc286f9dfe604d3da6c4d89da20cddcf9f71163ac844"} Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.833416 4792 scope.go:117] "RemoveContainer" containerID="f00124fbbcb9b98f72f72183f4d57b232a6f4b52d7998ed5f2500e62288cc2f4" Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.852691 4792 scope.go:117] "RemoveContainer" containerID="2b098290a8fd0c896e944f83e729f8e14910906b5b9b1a39cfd0d4dc85a66cef" Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.867456 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fnphm"] Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.876578 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fnphm"] Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.889473 4792 scope.go:117] "RemoveContainer" containerID="4fdf97b10f270889a2e2a0f4da8e74ca4e1b8f4e6bc16b804fdc0d2e589f9678" Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.911019 4792 scope.go:117] "RemoveContainer" containerID="f00124fbbcb9b98f72f72183f4d57b232a6f4b52d7998ed5f2500e62288cc2f4" Jan 21 18:34:26 crc kubenswrapper[4792]: E0121 18:34:26.911672 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f00124fbbcb9b98f72f72183f4d57b232a6f4b52d7998ed5f2500e62288cc2f4\": container with ID starting with f00124fbbcb9b98f72f72183f4d57b232a6f4b52d7998ed5f2500e62288cc2f4 not found: ID does not exist" containerID="f00124fbbcb9b98f72f72183f4d57b232a6f4b52d7998ed5f2500e62288cc2f4" Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.911785 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f00124fbbcb9b98f72f72183f4d57b232a6f4b52d7998ed5f2500e62288cc2f4"} err="failed to get container status \"f00124fbbcb9b98f72f72183f4d57b232a6f4b52d7998ed5f2500e62288cc2f4\": rpc error: code = NotFound desc = could not find container \"f00124fbbcb9b98f72f72183f4d57b232a6f4b52d7998ed5f2500e62288cc2f4\": container with ID starting with f00124fbbcb9b98f72f72183f4d57b232a6f4b52d7998ed5f2500e62288cc2f4 not found: ID does not exist" Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.911831 4792 scope.go:117] "RemoveContainer" containerID="2b098290a8fd0c896e944f83e729f8e14910906b5b9b1a39cfd0d4dc85a66cef" Jan 21 18:34:26 crc kubenswrapper[4792]: E0121 18:34:26.912756 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b098290a8fd0c896e944f83e729f8e14910906b5b9b1a39cfd0d4dc85a66cef\": container with ID starting with 2b098290a8fd0c896e944f83e729f8e14910906b5b9b1a39cfd0d4dc85a66cef not found: ID does not exist" containerID="2b098290a8fd0c896e944f83e729f8e14910906b5b9b1a39cfd0d4dc85a66cef" Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.912795 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b098290a8fd0c896e944f83e729f8e14910906b5b9b1a39cfd0d4dc85a66cef"} err="failed to get container status \"2b098290a8fd0c896e944f83e729f8e14910906b5b9b1a39cfd0d4dc85a66cef\": rpc error: code = NotFound desc = could not find 
container \"2b098290a8fd0c896e944f83e729f8e14910906b5b9b1a39cfd0d4dc85a66cef\": container with ID starting with 2b098290a8fd0c896e944f83e729f8e14910906b5b9b1a39cfd0d4dc85a66cef not found: ID does not exist" Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.912816 4792 scope.go:117] "RemoveContainer" containerID="4fdf97b10f270889a2e2a0f4da8e74ca4e1b8f4e6bc16b804fdc0d2e589f9678" Jan 21 18:34:26 crc kubenswrapper[4792]: E0121 18:34:26.913254 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fdf97b10f270889a2e2a0f4da8e74ca4e1b8f4e6bc16b804fdc0d2e589f9678\": container with ID starting with 4fdf97b10f270889a2e2a0f4da8e74ca4e1b8f4e6bc16b804fdc0d2e589f9678 not found: ID does not exist" containerID="4fdf97b10f270889a2e2a0f4da8e74ca4e1b8f4e6bc16b804fdc0d2e589f9678" Jan 21 18:34:26 crc kubenswrapper[4792]: I0121 18:34:26.913301 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fdf97b10f270889a2e2a0f4da8e74ca4e1b8f4e6bc16b804fdc0d2e589f9678"} err="failed to get container status \"4fdf97b10f270889a2e2a0f4da8e74ca4e1b8f4e6bc16b804fdc0d2e589f9678\": rpc error: code = NotFound desc = could not find container \"4fdf97b10f270889a2e2a0f4da8e74ca4e1b8f4e6bc16b804fdc0d2e589f9678\": container with ID starting with 4fdf97b10f270889a2e2a0f4da8e74ca4e1b8f4e6bc16b804fdc0d2e589f9678 not found: ID does not exist" Jan 21 18:34:28 crc kubenswrapper[4792]: I0121 18:34:28.255538 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b03d79e2-0577-4377-b9be-6707507fb23e" path="/var/lib/kubelet/pods/b03d79e2-0577-4377-b9be-6707507fb23e/volumes" Jan 21 18:34:37 crc kubenswrapper[4792]: I0121 18:34:37.247580 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:34:37 crc kubenswrapper[4792]: E0121 18:34:37.248452 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:34:50 crc kubenswrapper[4792]: I0121 18:34:50.250999 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:34:50 crc kubenswrapper[4792]: E0121 18:34:50.251661 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:35:02 crc kubenswrapper[4792]: I0121 18:35:02.246698 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:35:02 crc kubenswrapper[4792]: E0121 18:35:02.247572 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:35:13 crc kubenswrapper[4792]: I0121 18:35:13.246491 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:35:13 crc kubenswrapper[4792]: E0121 18:35:13.247426 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:35:28 crc kubenswrapper[4792]: I0121 18:35:28.246451 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:35:28 crc kubenswrapper[4792]: E0121 18:35:28.247718 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:35:39 crc kubenswrapper[4792]: I0121 18:35:39.247163 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:35:39 crc kubenswrapper[4792]: E0121 18:35:39.247829 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:35:54 crc kubenswrapper[4792]: I0121 18:35:54.250178 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:35:54 crc kubenswrapper[4792]: E0121 18:35:54.251555 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:36:05 crc kubenswrapper[4792]: I0121 18:36:05.247558 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:36:05 crc kubenswrapper[4792]: E0121 18:36:05.249003 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" 
podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:36:17 crc kubenswrapper[4792]: I0121 18:36:17.246979 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:36:17 crc kubenswrapper[4792]: E0121 18:36:17.247726 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:36:30 crc kubenswrapper[4792]: I0121 18:36:30.246943 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:36:30 crc kubenswrapper[4792]: E0121 18:36:30.247495 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:36:44 crc kubenswrapper[4792]: I0121 18:36:44.247826 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:36:44 crc kubenswrapper[4792]: E0121 18:36:44.249019 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:36:56 crc kubenswrapper[4792]: I0121 18:36:56.254203 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:36:56 crc kubenswrapper[4792]: E0121 18:36:56.254723 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:37:10 crc kubenswrapper[4792]: I0121 18:37:10.247163 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:37:10 crc kubenswrapper[4792]: E0121 18:37:10.248337 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:37:23 crc kubenswrapper[4792]: I0121 18:37:23.246274 4792 scope.go:117] "RemoveContainer" 
containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:37:23 crc kubenswrapper[4792]: E0121 18:37:23.247060 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:37:37 crc kubenswrapper[4792]: I0121 18:37:37.246444 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:37:37 crc kubenswrapper[4792]: E0121 18:37:37.249766 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:37:51 crc kubenswrapper[4792]: I0121 18:37:51.247196 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:37:51 crc kubenswrapper[4792]: E0121 18:37:51.248005 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:38:04 crc kubenswrapper[4792]: I0121 18:38:04.247550 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:38:04 crc kubenswrapper[4792]: E0121 18:38:04.248377 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:38:17 crc kubenswrapper[4792]: I0121 18:38:17.247110 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:38:17 crc kubenswrapper[4792]: E0121 18:38:17.247979 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:38:32 crc kubenswrapper[4792]: I0121 18:38:32.247474 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:38:32 crc kubenswrapper[4792]: E0121 18:38:32.248631 4792 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:38:44 crc kubenswrapper[4792]: I0121 18:38:44.246331 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:38:44 crc kubenswrapper[4792]: E0121 18:38:44.247072 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:38:58 crc kubenswrapper[4792]: I0121 18:38:58.248200 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:38:58 crc kubenswrapper[4792]: E0121 18:38:58.248812 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:39:12 crc kubenswrapper[4792]: I0121 18:39:12.248230 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:39:12 crc kubenswrapper[4792]: E0121 18:39:12.249041 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:39:26 crc kubenswrapper[4792]: I0121 18:39:26.246753 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4" Jan 21 18:39:26 crc kubenswrapper[4792]: I0121 18:39:26.543956 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerStarted","Data":"a7db2c6b03ac8a6b13df7083f8fac8d0c267c31933f69a8328568ea96affff48"} Jan 21 18:39:33 crc kubenswrapper[4792]: I0121 18:39:33.146697 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-framework-operators-8q7d5"] Jan 21 18:39:33 crc kubenswrapper[4792]: E0121 18:39:33.148146 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b03d79e2-0577-4377-b9be-6707507fb23e" containerName="extract-content" Jan 21 18:39:33 crc kubenswrapper[4792]: I0121 18:39:33.148171 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b03d79e2-0577-4377-b9be-6707507fb23e" containerName="extract-content" Jan 21 18:39:33 crc kubenswrapper[4792]: E0121 
Jan 21 18:39:33 crc kubenswrapper[4792]: I0121 18:39:33.148204 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b03d79e2-0577-4377-b9be-6707507fb23e" containerName="registry-server"
Jan 21 18:39:33 crc kubenswrapper[4792]: E0121 18:39:33.148224 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b03d79e2-0577-4377-b9be-6707507fb23e" containerName="extract-utilities"
Jan 21 18:39:33 crc kubenswrapper[4792]: I0121 18:39:33.148234 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b03d79e2-0577-4377-b9be-6707507fb23e" containerName="extract-utilities"
Jan 21 18:39:33 crc kubenswrapper[4792]: I0121 18:39:33.148412 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="b03d79e2-0577-4377-b9be-6707507fb23e" containerName="registry-server"
Jan 21 18:39:33 crc kubenswrapper[4792]: I0121 18:39:33.149201 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-8q7d5"
Jan 21 18:39:33 crc kubenswrapper[4792]: I0121 18:39:33.154089 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57z8l\" (UniqueName: \"kubernetes.io/projected/a04e2bb8-0176-4fd0-9f24-70caafc3c0e0-kube-api-access-57z8l\") pod \"service-telemetry-framework-operators-8q7d5\" (UID: \"a04e2bb8-0176-4fd0-9f24-70caafc3c0e0\") " pod="service-telemetry/service-telemetry-framework-operators-8q7d5"
Jan 21 18:39:33 crc kubenswrapper[4792]: I0121 18:39:33.159016 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-8q7d5"]
Jan 21 18:39:33 crc kubenswrapper[4792]: I0121 18:39:33.257140 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57z8l\" (UniqueName: \"kubernetes.io/projected/a04e2bb8-0176-4fd0-9f24-70caafc3c0e0-kube-api-access-57z8l\") pod \"service-telemetry-framework-operators-8q7d5\" (UID: \"a04e2bb8-0176-4fd0-9f24-70caafc3c0e0\") " pod="service-telemetry/service-telemetry-framework-operators-8q7d5"
Jan 21 18:39:33 crc kubenswrapper[4792]: I0121 18:39:33.294199 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57z8l\" (UniqueName: \"kubernetes.io/projected/a04e2bb8-0176-4fd0-9f24-70caafc3c0e0-kube-api-access-57z8l\") pod \"service-telemetry-framework-operators-8q7d5\" (UID: \"a04e2bb8-0176-4fd0-9f24-70caafc3c0e0\") " pod="service-telemetry/service-telemetry-framework-operators-8q7d5"
Jan 21 18:39:33 crc kubenswrapper[4792]: I0121 18:39:33.474468 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-8q7d5"
Jan 21 18:39:33 crc kubenswrapper[4792]: I0121 18:39:33.891757 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-8q7d5"]
Jan 21 18:39:33 crc kubenswrapper[4792]: I0121 18:39:33.906017 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 21 18:39:34 crc kubenswrapper[4792]: I0121 18:39:34.610793 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-8q7d5" event={"ID":"a04e2bb8-0176-4fd0-9f24-70caafc3c0e0","Type":"ContainerStarted","Data":"6a1b6c1cfe6d5bd411567f854b117663a8bf0e4e2c48981d26ae3882d45466bc"}
Jan 21 18:39:34 crc kubenswrapper[4792]: I0121 18:39:34.611007 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-8q7d5" event={"ID":"a04e2bb8-0176-4fd0-9f24-70caafc3c0e0","Type":"ContainerStarted","Data":"03aff8e6ed4e8358d98508efc842c76c5b242cb4ea1c00c2d11481eaa32e72db"}
Jan 21 18:39:34 crc kubenswrapper[4792]: I0121 18:39:34.631887 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-framework-operators-8q7d5" podStartSLOduration=1.5110795879999999 podStartE2EDuration="1.631865785s" podCreationTimestamp="2026-01-21 18:39:33 +0000 UTC" firstStartedPulling="2026-01-21 18:39:33.905583953 +0000 UTC m=+2607.887547149" lastFinishedPulling="2026-01-21 18:39:34.02637016 +0000 UTC m=+2608.008333346" observedRunningTime="2026-01-21 18:39:34.627926797 +0000 UTC m=+2608.609889973" watchObservedRunningTime="2026-01-21 18:39:34.631865785 +0000 UTC m=+2608.613828971"
Jan 21 18:39:43 crc kubenswrapper[4792]: I0121 18:39:43.474650 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/service-telemetry-framework-operators-8q7d5"
Jan 21 18:39:43 crc kubenswrapper[4792]: I0121 18:39:43.475232 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/service-telemetry-framework-operators-8q7d5"
Jan 21 18:39:43 crc kubenswrapper[4792]: I0121 18:39:43.511194 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/service-telemetry-framework-operators-8q7d5"
Jan 21 18:39:43 crc kubenswrapper[4792]: I0121 18:39:43.717941 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/service-telemetry-framework-operators-8q7d5"
Jan 21 18:39:43 crc kubenswrapper[4792]: I0121 18:39:43.769306 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-8q7d5"]
Jan 21 18:39:45 crc kubenswrapper[4792]: I0121 18:39:45.699694 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/service-telemetry-framework-operators-8q7d5" podUID="a04e2bb8-0176-4fd0-9f24-70caafc3c0e0" containerName="registry-server" containerID="cri-o://6a1b6c1cfe6d5bd411567f854b117663a8bf0e4e2c48981d26ae3882d45466bc" gracePeriod=2
Jan 21 18:39:46 crc kubenswrapper[4792]: I0121 18:39:46.084506 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-8q7d5"
Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-8q7d5" Jan 21 18:39:46 crc kubenswrapper[4792]: I0121 18:39:46.279059 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-57z8l\" (UniqueName: \"kubernetes.io/projected/a04e2bb8-0176-4fd0-9f24-70caafc3c0e0-kube-api-access-57z8l\") pod \"a04e2bb8-0176-4fd0-9f24-70caafc3c0e0\" (UID: \"a04e2bb8-0176-4fd0-9f24-70caafc3c0e0\") " Jan 21 18:39:46 crc kubenswrapper[4792]: I0121 18:39:46.287793 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a04e2bb8-0176-4fd0-9f24-70caafc3c0e0-kube-api-access-57z8l" (OuterVolumeSpecName: "kube-api-access-57z8l") pod "a04e2bb8-0176-4fd0-9f24-70caafc3c0e0" (UID: "a04e2bb8-0176-4fd0-9f24-70caafc3c0e0"). InnerVolumeSpecName "kube-api-access-57z8l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:39:46 crc kubenswrapper[4792]: I0121 18:39:46.382602 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-57z8l\" (UniqueName: \"kubernetes.io/projected/a04e2bb8-0176-4fd0-9f24-70caafc3c0e0-kube-api-access-57z8l\") on node \"crc\" DevicePath \"\"" Jan 21 18:39:46 crc kubenswrapper[4792]: I0121 18:39:46.709577 4792 generic.go:334] "Generic (PLEG): container finished" podID="a04e2bb8-0176-4fd0-9f24-70caafc3c0e0" containerID="6a1b6c1cfe6d5bd411567f854b117663a8bf0e4e2c48981d26ae3882d45466bc" exitCode=0 Jan 21 18:39:46 crc kubenswrapper[4792]: I0121 18:39:46.711286 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-8q7d5" event={"ID":"a04e2bb8-0176-4fd0-9f24-70caafc3c0e0","Type":"ContainerDied","Data":"6a1b6c1cfe6d5bd411567f854b117663a8bf0e4e2c48981d26ae3882d45466bc"} Jan 21 18:39:46 crc kubenswrapper[4792]: I0121 18:39:46.711417 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-8q7d5" event={"ID":"a04e2bb8-0176-4fd0-9f24-70caafc3c0e0","Type":"ContainerDied","Data":"03aff8e6ed4e8358d98508efc842c76c5b242cb4ea1c00c2d11481eaa32e72db"} Jan 21 18:39:46 crc kubenswrapper[4792]: I0121 18:39:46.711545 4792 scope.go:117] "RemoveContainer" containerID="6a1b6c1cfe6d5bd411567f854b117663a8bf0e4e2c48981d26ae3882d45466bc" Jan 21 18:39:46 crc kubenswrapper[4792]: I0121 18:39:46.711785 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-8q7d5" Jan 21 18:39:46 crc kubenswrapper[4792]: I0121 18:39:46.738238 4792 scope.go:117] "RemoveContainer" containerID="6a1b6c1cfe6d5bd411567f854b117663a8bf0e4e2c48981d26ae3882d45466bc" Jan 21 18:39:46 crc kubenswrapper[4792]: E0121 18:39:46.739416 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a1b6c1cfe6d5bd411567f854b117663a8bf0e4e2c48981d26ae3882d45466bc\": container with ID starting with 6a1b6c1cfe6d5bd411567f854b117663a8bf0e4e2c48981d26ae3882d45466bc not found: ID does not exist" containerID="6a1b6c1cfe6d5bd411567f854b117663a8bf0e4e2c48981d26ae3882d45466bc" Jan 21 18:39:46 crc kubenswrapper[4792]: I0121 18:39:46.739559 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a1b6c1cfe6d5bd411567f854b117663a8bf0e4e2c48981d26ae3882d45466bc"} err="failed to get container status \"6a1b6c1cfe6d5bd411567f854b117663a8bf0e4e2c48981d26ae3882d45466bc\": rpc error: code = NotFound desc = could not find container \"6a1b6c1cfe6d5bd411567f854b117663a8bf0e4e2c48981d26ae3882d45466bc\": container with ID starting with 6a1b6c1cfe6d5bd411567f854b117663a8bf0e4e2c48981d26ae3882d45466bc not found: ID does not exist" Jan 21 18:39:46 crc kubenswrapper[4792]: I0121 18:39:46.757984 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-8q7d5"] Jan 21 18:39:46 crc kubenswrapper[4792]: I0121 18:39:46.769555 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-8q7d5"] Jan 21 18:39:48 crc kubenswrapper[4792]: I0121 18:39:48.256674 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a04e2bb8-0176-4fd0-9f24-70caafc3c0e0" path="/var/lib/kubelet/pods/a04e2bb8-0176-4fd0-9f24-70caafc3c0e0/volumes" Jan 21 18:41:53 crc kubenswrapper[4792]: I0121 18:41:53.570723 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:41:53 crc kubenswrapper[4792]: I0121 18:41:53.571322 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:42:23 crc kubenswrapper[4792]: I0121 18:42:23.571013 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:42:23 crc kubenswrapper[4792]: I0121 18:42:23.571718 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:42:43 crc kubenswrapper[4792]: I0121 18:42:43.495834 4792 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openshift-marketplace/redhat-operators-5fz2h"] Jan 21 18:42:43 crc kubenswrapper[4792]: E0121 18:42:43.496816 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a04e2bb8-0176-4fd0-9f24-70caafc3c0e0" containerName="registry-server" Jan 21 18:42:43 crc kubenswrapper[4792]: I0121 18:42:43.496833 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a04e2bb8-0176-4fd0-9f24-70caafc3c0e0" containerName="registry-server" Jan 21 18:42:43 crc kubenswrapper[4792]: I0121 18:42:43.497022 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a04e2bb8-0176-4fd0-9f24-70caafc3c0e0" containerName="registry-server" Jan 21 18:42:43 crc kubenswrapper[4792]: I0121 18:42:43.498228 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5fz2h" Jan 21 18:42:43 crc kubenswrapper[4792]: I0121 18:42:43.515289 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5fz2h"] Jan 21 18:42:43 crc kubenswrapper[4792]: I0121 18:42:43.612572 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9abd6ec3-32dd-42df-87de-3537ccb56fa8-catalog-content\") pod \"redhat-operators-5fz2h\" (UID: \"9abd6ec3-32dd-42df-87de-3537ccb56fa8\") " pod="openshift-marketplace/redhat-operators-5fz2h" Jan 21 18:42:43 crc kubenswrapper[4792]: I0121 18:42:43.612690 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgz6h\" (UniqueName: \"kubernetes.io/projected/9abd6ec3-32dd-42df-87de-3537ccb56fa8-kube-api-access-kgz6h\") pod \"redhat-operators-5fz2h\" (UID: \"9abd6ec3-32dd-42df-87de-3537ccb56fa8\") " pod="openshift-marketplace/redhat-operators-5fz2h" Jan 21 18:42:43 crc kubenswrapper[4792]: I0121 18:42:43.612717 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9abd6ec3-32dd-42df-87de-3537ccb56fa8-utilities\") pod \"redhat-operators-5fz2h\" (UID: \"9abd6ec3-32dd-42df-87de-3537ccb56fa8\") " pod="openshift-marketplace/redhat-operators-5fz2h" Jan 21 18:42:43 crc kubenswrapper[4792]: I0121 18:42:43.714364 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9abd6ec3-32dd-42df-87de-3537ccb56fa8-catalog-content\") pod \"redhat-operators-5fz2h\" (UID: \"9abd6ec3-32dd-42df-87de-3537ccb56fa8\") " pod="openshift-marketplace/redhat-operators-5fz2h" Jan 21 18:42:43 crc kubenswrapper[4792]: I0121 18:42:43.714462 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgz6h\" (UniqueName: \"kubernetes.io/projected/9abd6ec3-32dd-42df-87de-3537ccb56fa8-kube-api-access-kgz6h\") pod \"redhat-operators-5fz2h\" (UID: \"9abd6ec3-32dd-42df-87de-3537ccb56fa8\") " pod="openshift-marketplace/redhat-operators-5fz2h" Jan 21 18:42:43 crc kubenswrapper[4792]: I0121 18:42:43.714490 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9abd6ec3-32dd-42df-87de-3537ccb56fa8-utilities\") pod \"redhat-operators-5fz2h\" (UID: \"9abd6ec3-32dd-42df-87de-3537ccb56fa8\") " pod="openshift-marketplace/redhat-operators-5fz2h" Jan 21 18:42:43 crc kubenswrapper[4792]: I0121 18:42:43.715300 4792 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9abd6ec3-32dd-42df-87de-3537ccb56fa8-utilities\") pod \"redhat-operators-5fz2h\" (UID: \"9abd6ec3-32dd-42df-87de-3537ccb56fa8\") " pod="openshift-marketplace/redhat-operators-5fz2h" Jan 21 18:42:43 crc kubenswrapper[4792]: I0121 18:42:43.715383 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9abd6ec3-32dd-42df-87de-3537ccb56fa8-catalog-content\") pod \"redhat-operators-5fz2h\" (UID: \"9abd6ec3-32dd-42df-87de-3537ccb56fa8\") " pod="openshift-marketplace/redhat-operators-5fz2h" Jan 21 18:42:43 crc kubenswrapper[4792]: I0121 18:42:43.747111 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgz6h\" (UniqueName: \"kubernetes.io/projected/9abd6ec3-32dd-42df-87de-3537ccb56fa8-kube-api-access-kgz6h\") pod \"redhat-operators-5fz2h\" (UID: \"9abd6ec3-32dd-42df-87de-3537ccb56fa8\") " pod="openshift-marketplace/redhat-operators-5fz2h" Jan 21 18:42:43 crc kubenswrapper[4792]: I0121 18:42:43.818325 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5fz2h" Jan 21 18:42:44 crc kubenswrapper[4792]: I0121 18:42:44.138323 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5fz2h"] Jan 21 18:42:44 crc kubenswrapper[4792]: I0121 18:42:44.260669 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5fz2h" event={"ID":"9abd6ec3-32dd-42df-87de-3537ccb56fa8","Type":"ContainerStarted","Data":"0f56c8a5d105bfbfcab934ff4ae19c0e2a3904a6fe3bb1128889e5b24df8d0ae"} Jan 21 18:42:45 crc kubenswrapper[4792]: I0121 18:42:45.260005 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5fz2h" event={"ID":"9abd6ec3-32dd-42df-87de-3537ccb56fa8","Type":"ContainerDied","Data":"5edb5c84dd653f4d9422fea2faa68cb18af508bb7d4348c13994b988817a860b"} Jan 21 18:42:45 crc kubenswrapper[4792]: I0121 18:42:45.259907 4792 generic.go:334] "Generic (PLEG): container finished" podID="9abd6ec3-32dd-42df-87de-3537ccb56fa8" containerID="5edb5c84dd653f4d9422fea2faa68cb18af508bb7d4348c13994b988817a860b" exitCode=0 Jan 21 18:42:46 crc kubenswrapper[4792]: I0121 18:42:46.289036 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5fz2h" event={"ID":"9abd6ec3-32dd-42df-87de-3537ccb56fa8","Type":"ContainerStarted","Data":"be9849a25c0954843e4ba7f978c7cf3257d69b8c0de9ba806900415b8faa9be7"} Jan 21 18:42:48 crc kubenswrapper[4792]: I0121 18:42:48.309595 4792 generic.go:334] "Generic (PLEG): container finished" podID="9abd6ec3-32dd-42df-87de-3537ccb56fa8" containerID="be9849a25c0954843e4ba7f978c7cf3257d69b8c0de9ba806900415b8faa9be7" exitCode=0 Jan 21 18:42:48 crc kubenswrapper[4792]: I0121 18:42:48.309703 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5fz2h" event={"ID":"9abd6ec3-32dd-42df-87de-3537ccb56fa8","Type":"ContainerDied","Data":"be9849a25c0954843e4ba7f978c7cf3257d69b8c0de9ba806900415b8faa9be7"} Jan 21 18:42:51 crc kubenswrapper[4792]: I0121 18:42:51.335254 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5fz2h" event={"ID":"9abd6ec3-32dd-42df-87de-3537ccb56fa8","Type":"ContainerStarted","Data":"8017cb128fafeba125fc044a510d801245b5efca6693b90d9d0b89a5fde8949a"} Jan 21 18:42:51 crc kubenswrapper[4792]: I0121 
Jan 21 18:42:53 crc kubenswrapper[4792]: I0121 18:42:53.572370 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 21 18:42:53 crc kubenswrapper[4792]: I0121 18:42:53.572782 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 21 18:42:53 crc kubenswrapper[4792]: I0121 18:42:53.573184 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x"
Jan 21 18:42:53 crc kubenswrapper[4792]: I0121 18:42:53.573818 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a7db2c6b03ac8a6b13df7083f8fac8d0c267c31933f69a8328568ea96affff48"} pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 21 18:42:53 crc kubenswrapper[4792]: I0121 18:42:53.573946 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" containerID="cri-o://a7db2c6b03ac8a6b13df7083f8fac8d0c267c31933f69a8328568ea96affff48" gracePeriod=600
Jan 21 18:42:53 crc kubenswrapper[4792]: I0121 18:42:53.819945 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5fz2h"
Jan 21 18:42:53 crc kubenswrapper[4792]: I0121 18:42:53.820346 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5fz2h"
Jan 21 18:42:54 crc kubenswrapper[4792]: I0121 18:42:54.361282 4792 generic.go:334] "Generic (PLEG): container finished" podID="759f2e21-e44e-4049-b262-cb49448e22ab" containerID="a7db2c6b03ac8a6b13df7083f8fac8d0c267c31933f69a8328568ea96affff48" exitCode=0
Jan 21 18:42:54 crc kubenswrapper[4792]: I0121 18:42:54.361330 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerDied","Data":"a7db2c6b03ac8a6b13df7083f8fac8d0c267c31933f69a8328568ea96affff48"}
Jan 21 18:42:54 crc kubenswrapper[4792]: I0121 18:42:54.361365 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerStarted","Data":"8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6"}
Jan 21 18:42:54 crc kubenswrapper[4792]: I0121 18:42:54.361419 4792 scope.go:117] "RemoveContainer" containerID="c87f3ccf8a383bd7bdc11459fbc9a2be44507b5ac3a788dbe6b7341faa4ec4f4"
Jan 21 18:42:54 crc kubenswrapper[4792]: I0121 18:42:54.871576 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5fz2h" podUID="9abd6ec3-32dd-42df-87de-3537ccb56fa8" containerName="registry-server" probeResult="failure" output=<
Jan 21 18:42:54 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s
Jan 21 18:42:54 crc kubenswrapper[4792]: >
Jan 21 18:43:03 crc kubenswrapper[4792]: I0121 18:43:03.870937 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5fz2h"
Jan 21 18:43:03 crc kubenswrapper[4792]: I0121 18:43:03.921047 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5fz2h"
Jan 21 18:43:04 crc kubenswrapper[4792]: I0121 18:43:04.126948 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5fz2h"]
Jan 21 18:43:05 crc kubenswrapper[4792]: I0121 18:43:05.453129 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5fz2h" podUID="9abd6ec3-32dd-42df-87de-3537ccb56fa8" containerName="registry-server" containerID="cri-o://8017cb128fafeba125fc044a510d801245b5efca6693b90d9d0b89a5fde8949a" gracePeriod=2
Jan 21 18:43:05 crc kubenswrapper[4792]: I0121 18:43:05.850626 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5fz2h"
Jan 21 18:43:05 crc kubenswrapper[4792]: I0121 18:43:05.941302 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9abd6ec3-32dd-42df-87de-3537ccb56fa8-catalog-content\") pod \"9abd6ec3-32dd-42df-87de-3537ccb56fa8\" (UID: \"9abd6ec3-32dd-42df-87de-3537ccb56fa8\") "
Jan 21 18:43:05 crc kubenswrapper[4792]: I0121 18:43:05.941753 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kgz6h\" (UniqueName: \"kubernetes.io/projected/9abd6ec3-32dd-42df-87de-3537ccb56fa8-kube-api-access-kgz6h\") pod \"9abd6ec3-32dd-42df-87de-3537ccb56fa8\" (UID: \"9abd6ec3-32dd-42df-87de-3537ccb56fa8\") "
Jan 21 18:43:05 crc kubenswrapper[4792]: I0121 18:43:05.941790 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9abd6ec3-32dd-42df-87de-3537ccb56fa8-utilities\") pod \"9abd6ec3-32dd-42df-87de-3537ccb56fa8\" (UID: \"9abd6ec3-32dd-42df-87de-3537ccb56fa8\") "
Jan 21 18:43:05 crc kubenswrapper[4792]: I0121 18:43:05.950066 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9abd6ec3-32dd-42df-87de-3537ccb56fa8-kube-api-access-kgz6h" (OuterVolumeSpecName: "kube-api-access-kgz6h") pod "9abd6ec3-32dd-42df-87de-3537ccb56fa8" (UID: "9abd6ec3-32dd-42df-87de-3537ccb56fa8"). InnerVolumeSpecName "kube-api-access-kgz6h". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:43:05 crc kubenswrapper[4792]: I0121 18:43:05.950404 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9abd6ec3-32dd-42df-87de-3537ccb56fa8-utilities" (OuterVolumeSpecName: "utilities") pod "9abd6ec3-32dd-42df-87de-3537ccb56fa8" (UID: "9abd6ec3-32dd-42df-87de-3537ccb56fa8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:43:06 crc kubenswrapper[4792]: I0121 18:43:06.045279 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kgz6h\" (UniqueName: \"kubernetes.io/projected/9abd6ec3-32dd-42df-87de-3537ccb56fa8-kube-api-access-kgz6h\") on node \"crc\" DevicePath \"\"" Jan 21 18:43:06 crc kubenswrapper[4792]: I0121 18:43:06.045348 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9abd6ec3-32dd-42df-87de-3537ccb56fa8-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:43:06 crc kubenswrapper[4792]: I0121 18:43:06.087624 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9abd6ec3-32dd-42df-87de-3537ccb56fa8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9abd6ec3-32dd-42df-87de-3537ccb56fa8" (UID: "9abd6ec3-32dd-42df-87de-3537ccb56fa8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:43:06 crc kubenswrapper[4792]: I0121 18:43:06.146517 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9abd6ec3-32dd-42df-87de-3537ccb56fa8-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:43:06 crc kubenswrapper[4792]: I0121 18:43:06.464504 4792 generic.go:334] "Generic (PLEG): container finished" podID="9abd6ec3-32dd-42df-87de-3537ccb56fa8" containerID="8017cb128fafeba125fc044a510d801245b5efca6693b90d9d0b89a5fde8949a" exitCode=0 Jan 21 18:43:06 crc kubenswrapper[4792]: I0121 18:43:06.464554 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5fz2h" event={"ID":"9abd6ec3-32dd-42df-87de-3537ccb56fa8","Type":"ContainerDied","Data":"8017cb128fafeba125fc044a510d801245b5efca6693b90d9d0b89a5fde8949a"} Jan 21 18:43:06 crc kubenswrapper[4792]: I0121 18:43:06.464587 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5fz2h" event={"ID":"9abd6ec3-32dd-42df-87de-3537ccb56fa8","Type":"ContainerDied","Data":"0f56c8a5d105bfbfcab934ff4ae19c0e2a3904a6fe3bb1128889e5b24df8d0ae"} Jan 21 18:43:06 crc kubenswrapper[4792]: I0121 18:43:06.464604 4792 scope.go:117] "RemoveContainer" containerID="8017cb128fafeba125fc044a510d801245b5efca6693b90d9d0b89a5fde8949a" Jan 21 18:43:06 crc kubenswrapper[4792]: I0121 18:43:06.464807 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5fz2h" Jan 21 18:43:06 crc kubenswrapper[4792]: I0121 18:43:06.491720 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5fz2h"] Jan 21 18:43:06 crc kubenswrapper[4792]: I0121 18:43:06.496653 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5fz2h"] Jan 21 18:43:06 crc kubenswrapper[4792]: I0121 18:43:06.521001 4792 scope.go:117] "RemoveContainer" containerID="be9849a25c0954843e4ba7f978c7cf3257d69b8c0de9ba806900415b8faa9be7" Jan 21 18:43:06 crc kubenswrapper[4792]: I0121 18:43:06.550272 4792 scope.go:117] "RemoveContainer" containerID="5edb5c84dd653f4d9422fea2faa68cb18af508bb7d4348c13994b988817a860b" Jan 21 18:43:06 crc kubenswrapper[4792]: I0121 18:43:06.587806 4792 scope.go:117] "RemoveContainer" containerID="8017cb128fafeba125fc044a510d801245b5efca6693b90d9d0b89a5fde8949a" Jan 21 18:43:06 crc kubenswrapper[4792]: E0121 18:43:06.588721 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8017cb128fafeba125fc044a510d801245b5efca6693b90d9d0b89a5fde8949a\": container with ID starting with 8017cb128fafeba125fc044a510d801245b5efca6693b90d9d0b89a5fde8949a not found: ID does not exist" containerID="8017cb128fafeba125fc044a510d801245b5efca6693b90d9d0b89a5fde8949a" Jan 21 18:43:06 crc kubenswrapper[4792]: I0121 18:43:06.588782 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8017cb128fafeba125fc044a510d801245b5efca6693b90d9d0b89a5fde8949a"} err="failed to get container status \"8017cb128fafeba125fc044a510d801245b5efca6693b90d9d0b89a5fde8949a\": rpc error: code = NotFound desc = could not find container \"8017cb128fafeba125fc044a510d801245b5efca6693b90d9d0b89a5fde8949a\": container with ID starting with 8017cb128fafeba125fc044a510d801245b5efca6693b90d9d0b89a5fde8949a not found: ID does not exist" Jan 21 18:43:06 crc kubenswrapper[4792]: I0121 18:43:06.588828 4792 scope.go:117] "RemoveContainer" containerID="be9849a25c0954843e4ba7f978c7cf3257d69b8c0de9ba806900415b8faa9be7" Jan 21 18:43:06 crc kubenswrapper[4792]: E0121 18:43:06.589255 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be9849a25c0954843e4ba7f978c7cf3257d69b8c0de9ba806900415b8faa9be7\": container with ID starting with be9849a25c0954843e4ba7f978c7cf3257d69b8c0de9ba806900415b8faa9be7 not found: ID does not exist" containerID="be9849a25c0954843e4ba7f978c7cf3257d69b8c0de9ba806900415b8faa9be7" Jan 21 18:43:06 crc kubenswrapper[4792]: I0121 18:43:06.589290 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be9849a25c0954843e4ba7f978c7cf3257d69b8c0de9ba806900415b8faa9be7"} err="failed to get container status \"be9849a25c0954843e4ba7f978c7cf3257d69b8c0de9ba806900415b8faa9be7\": rpc error: code = NotFound desc = could not find container \"be9849a25c0954843e4ba7f978c7cf3257d69b8c0de9ba806900415b8faa9be7\": container with ID starting with be9849a25c0954843e4ba7f978c7cf3257d69b8c0de9ba806900415b8faa9be7 not found: ID does not exist" Jan 21 18:43:06 crc kubenswrapper[4792]: I0121 18:43:06.589315 4792 scope.go:117] "RemoveContainer" containerID="5edb5c84dd653f4d9422fea2faa68cb18af508bb7d4348c13994b988817a860b" Jan 21 18:43:06 crc kubenswrapper[4792]: E0121 18:43:06.589874 4792 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"5edb5c84dd653f4d9422fea2faa68cb18af508bb7d4348c13994b988817a860b\": container with ID starting with 5edb5c84dd653f4d9422fea2faa68cb18af508bb7d4348c13994b988817a860b not found: ID does not exist" containerID="5edb5c84dd653f4d9422fea2faa68cb18af508bb7d4348c13994b988817a860b" Jan 21 18:43:06 crc kubenswrapper[4792]: I0121 18:43:06.589947 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5edb5c84dd653f4d9422fea2faa68cb18af508bb7d4348c13994b988817a860b"} err="failed to get container status \"5edb5c84dd653f4d9422fea2faa68cb18af508bb7d4348c13994b988817a860b\": rpc error: code = NotFound desc = could not find container \"5edb5c84dd653f4d9422fea2faa68cb18af508bb7d4348c13994b988817a860b\": container with ID starting with 5edb5c84dd653f4d9422fea2faa68cb18af508bb7d4348c13994b988817a860b not found: ID does not exist" Jan 21 18:43:08 crc kubenswrapper[4792]: I0121 18:43:08.256804 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9abd6ec3-32dd-42df-87de-3537ccb56fa8" path="/var/lib/kubelet/pods/9abd6ec3-32dd-42df-87de-3537ccb56fa8/volumes" Jan 21 18:44:36 crc kubenswrapper[4792]: I0121 18:44:36.978782 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-framework-operators-h648g"] Jan 21 18:44:36 crc kubenswrapper[4792]: E0121 18:44:36.981026 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9abd6ec3-32dd-42df-87de-3537ccb56fa8" containerName="extract-utilities" Jan 21 18:44:36 crc kubenswrapper[4792]: I0121 18:44:36.981131 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="9abd6ec3-32dd-42df-87de-3537ccb56fa8" containerName="extract-utilities" Jan 21 18:44:36 crc kubenswrapper[4792]: E0121 18:44:36.981222 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9abd6ec3-32dd-42df-87de-3537ccb56fa8" containerName="registry-server" Jan 21 18:44:36 crc kubenswrapper[4792]: I0121 18:44:36.981283 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="9abd6ec3-32dd-42df-87de-3537ccb56fa8" containerName="registry-server" Jan 21 18:44:36 crc kubenswrapper[4792]: E0121 18:44:36.981347 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9abd6ec3-32dd-42df-87de-3537ccb56fa8" containerName="extract-content" Jan 21 18:44:36 crc kubenswrapper[4792]: I0121 18:44:36.981408 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="9abd6ec3-32dd-42df-87de-3537ccb56fa8" containerName="extract-content" Jan 21 18:44:36 crc kubenswrapper[4792]: I0121 18:44:36.981644 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="9abd6ec3-32dd-42df-87de-3537ccb56fa8" containerName="registry-server" Jan 21 18:44:36 crc kubenswrapper[4792]: I0121 18:44:36.982398 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-h648g" Jan 21 18:44:36 crc kubenswrapper[4792]: I0121 18:44:36.994961 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-h648g"] Jan 21 18:44:37 crc kubenswrapper[4792]: I0121 18:44:37.133755 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zhzd\" (UniqueName: \"kubernetes.io/projected/4229baf1-d264-4f99-8393-390b37799bfe-kube-api-access-8zhzd\") pod \"service-telemetry-framework-operators-h648g\" (UID: \"4229baf1-d264-4f99-8393-390b37799bfe\") " pod="service-telemetry/service-telemetry-framework-operators-h648g" Jan 21 18:44:37 crc kubenswrapper[4792]: I0121 18:44:37.235002 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zhzd\" (UniqueName: \"kubernetes.io/projected/4229baf1-d264-4f99-8393-390b37799bfe-kube-api-access-8zhzd\") pod \"service-telemetry-framework-operators-h648g\" (UID: \"4229baf1-d264-4f99-8393-390b37799bfe\") " pod="service-telemetry/service-telemetry-framework-operators-h648g" Jan 21 18:44:37 crc kubenswrapper[4792]: I0121 18:44:37.256170 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zhzd\" (UniqueName: \"kubernetes.io/projected/4229baf1-d264-4f99-8393-390b37799bfe-kube-api-access-8zhzd\") pod \"service-telemetry-framework-operators-h648g\" (UID: \"4229baf1-d264-4f99-8393-390b37799bfe\") " pod="service-telemetry/service-telemetry-framework-operators-h648g" Jan 21 18:44:37 crc kubenswrapper[4792]: I0121 18:44:37.306456 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-h648g" Jan 21 18:44:37 crc kubenswrapper[4792]: I0121 18:44:37.594426 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-h648g"] Jan 21 18:44:37 crc kubenswrapper[4792]: I0121 18:44:37.601766 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 18:44:38 crc kubenswrapper[4792]: I0121 18:44:38.267086 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-h648g" event={"ID":"4229baf1-d264-4f99-8393-390b37799bfe","Type":"ContainerStarted","Data":"c614df09f56211c0ade2941c5d8c6063050ce4390e5cfff6e51c8fbfaf431c2c"} Jan 21 18:44:38 crc kubenswrapper[4792]: I0121 18:44:38.269537 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-h648g" event={"ID":"4229baf1-d264-4f99-8393-390b37799bfe","Type":"ContainerStarted","Data":"16a94135ab30cbcbce7b3ef58588dcb9588a638104de4fdeb6cff2ad6e64e328"} Jan 21 18:44:38 crc kubenswrapper[4792]: I0121 18:44:38.295919 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-framework-operators-h648g" podStartSLOduration=2.156010699 podStartE2EDuration="2.295893788s" podCreationTimestamp="2026-01-21 18:44:36 +0000 UTC" firstStartedPulling="2026-01-21 18:44:37.601434555 +0000 UTC m=+2911.583397741" lastFinishedPulling="2026-01-21 18:44:37.741317634 +0000 UTC m=+2911.723280830" observedRunningTime="2026-01-21 18:44:38.293681368 +0000 UTC m=+2912.275644584" watchObservedRunningTime="2026-01-21 18:44:38.295893788 +0000 UTC m=+2912.277856974" Jan 21 18:44:47 crc kubenswrapper[4792]: I0121 
Jan 21 18:44:47 crc kubenswrapper[4792]: I0121 18:44:47.308628 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/service-telemetry-framework-operators-h648g"
Jan 21 18:44:47 crc kubenswrapper[4792]: I0121 18:44:47.346336 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/service-telemetry-framework-operators-h648g"
Jan 21 18:44:47 crc kubenswrapper[4792]: I0121 18:44:47.384384 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/service-telemetry-framework-operators-h648g"
Jan 21 18:44:47 crc kubenswrapper[4792]: I0121 18:44:47.583491 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-h648g"]
Jan 21 18:44:49 crc kubenswrapper[4792]: I0121 18:44:49.366248 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/service-telemetry-framework-operators-h648g" podUID="4229baf1-d264-4f99-8393-390b37799bfe" containerName="registry-server" containerID="cri-o://c614df09f56211c0ade2941c5d8c6063050ce4390e5cfff6e51c8fbfaf431c2c" gracePeriod=2
Jan 21 18:44:49 crc kubenswrapper[4792]: I0121 18:44:49.766131 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-h648g"
Jan 21 18:44:49 crc kubenswrapper[4792]: I0121 18:44:49.872384 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8zhzd\" (UniqueName: \"kubernetes.io/projected/4229baf1-d264-4f99-8393-390b37799bfe-kube-api-access-8zhzd\") pod \"4229baf1-d264-4f99-8393-390b37799bfe\" (UID: \"4229baf1-d264-4f99-8393-390b37799bfe\") "
Jan 21 18:44:49 crc kubenswrapper[4792]: I0121 18:44:49.887094 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4229baf1-d264-4f99-8393-390b37799bfe-kube-api-access-8zhzd" (OuterVolumeSpecName: "kube-api-access-8zhzd") pod "4229baf1-d264-4f99-8393-390b37799bfe" (UID: "4229baf1-d264-4f99-8393-390b37799bfe"). InnerVolumeSpecName "kube-api-access-8zhzd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 18:44:49 crc kubenswrapper[4792]: I0121 18:44:49.975229 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8zhzd\" (UniqueName: \"kubernetes.io/projected/4229baf1-d264-4f99-8393-390b37799bfe-kube-api-access-8zhzd\") on node \"crc\" DevicePath \"\""
Jan 21 18:44:50 crc kubenswrapper[4792]: I0121 18:44:50.375979 4792 generic.go:334] "Generic (PLEG): container finished" podID="4229baf1-d264-4f99-8393-390b37799bfe" containerID="c614df09f56211c0ade2941c5d8c6063050ce4390e5cfff6e51c8fbfaf431c2c" exitCode=0
Jan 21 18:44:50 crc kubenswrapper[4792]: I0121 18:44:50.376047 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-h648g" event={"ID":"4229baf1-d264-4f99-8393-390b37799bfe","Type":"ContainerDied","Data":"c614df09f56211c0ade2941c5d8c6063050ce4390e5cfff6e51c8fbfaf431c2c"}
Jan 21 18:44:50 crc kubenswrapper[4792]: I0121 18:44:50.376093 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-h648g" event={"ID":"4229baf1-d264-4f99-8393-390b37799bfe","Type":"ContainerDied","Data":"16a94135ab30cbcbce7b3ef58588dcb9588a638104de4fdeb6cff2ad6e64e328"}
Jan 21 18:44:50 crc kubenswrapper[4792]: I0121 18:44:50.376111 4792 scope.go:117] "RemoveContainer" containerID="c614df09f56211c0ade2941c5d8c6063050ce4390e5cfff6e51c8fbfaf431c2c"
Jan 21 18:44:50 crc kubenswrapper[4792]: I0121 18:44:50.376250 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-h648g"
Jan 21 18:44:50 crc kubenswrapper[4792]: I0121 18:44:50.399573 4792 scope.go:117] "RemoveContainer" containerID="c614df09f56211c0ade2941c5d8c6063050ce4390e5cfff6e51c8fbfaf431c2c"
Jan 21 18:44:50 crc kubenswrapper[4792]: E0121 18:44:50.400276 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c614df09f56211c0ade2941c5d8c6063050ce4390e5cfff6e51c8fbfaf431c2c\": container with ID starting with c614df09f56211c0ade2941c5d8c6063050ce4390e5cfff6e51c8fbfaf431c2c not found: ID does not exist" containerID="c614df09f56211c0ade2941c5d8c6063050ce4390e5cfff6e51c8fbfaf431c2c"
Jan 21 18:44:50 crc kubenswrapper[4792]: I0121 18:44:50.400326 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c614df09f56211c0ade2941c5d8c6063050ce4390e5cfff6e51c8fbfaf431c2c"} err="failed to get container status \"c614df09f56211c0ade2941c5d8c6063050ce4390e5cfff6e51c8fbfaf431c2c\": rpc error: code = NotFound desc = could not find container \"c614df09f56211c0ade2941c5d8c6063050ce4390e5cfff6e51c8fbfaf431c2c\": container with ID starting with c614df09f56211c0ade2941c5d8c6063050ce4390e5cfff6e51c8fbfaf431c2c not found: ID does not exist"
Jan 21 18:44:50 crc kubenswrapper[4792]: I0121 18:44:50.404682 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-h648g"]
Jan 21 18:44:50 crc kubenswrapper[4792]: I0121 18:44:50.411072 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-h648g"]
Jan 21 18:44:52 crc kubenswrapper[4792]: I0121 18:44:52.263947 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4229baf1-d264-4f99-8393-390b37799bfe" path="/var/lib/kubelet/pods/4229baf1-d264-4f99-8393-390b37799bfe/volumes"
Jan 21 18:44:53 crc kubenswrapper[4792]: I0121 18:44:53.570214 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 21 18:44:53 crc kubenswrapper[4792]: I0121 18:44:53.570533 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 21 18:45:00 crc kubenswrapper[4792]: I0121 18:45:00.155113 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483685-qzs2g"]
Jan 21 18:45:00 crc kubenswrapper[4792]: E0121 18:45:00.156290 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4229baf1-d264-4f99-8393-390b37799bfe" containerName="registry-server"
Jan 21 18:45:00 crc kubenswrapper[4792]: I0121 18:45:00.156336 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="4229baf1-d264-4f99-8393-390b37799bfe" containerName="registry-server"
Jan 21 18:45:00 crc kubenswrapper[4792]: I0121 18:45:00.156605 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="4229baf1-d264-4f99-8393-390b37799bfe" containerName="registry-server"
Jan 21 18:45:00 crc kubenswrapper[4792]: I0121 18:45:00.157952 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qzs2g"
Jan 21 18:45:00 crc kubenswrapper[4792]: I0121 18:45:00.162966 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 21 18:45:00 crc kubenswrapper[4792]: I0121 18:45:00.163834 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 21 18:45:00 crc kubenswrapper[4792]: I0121 18:45:00.168841 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483685-qzs2g"]
Jan 21 18:45:00 crc kubenswrapper[4792]: I0121 18:45:00.266794 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/67cf2616-18bd-4902-8b71-40e314413b30-config-volume\") pod \"collect-profiles-29483685-qzs2g\" (UID: \"67cf2616-18bd-4902-8b71-40e314413b30\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qzs2g"
Jan 21 18:45:00 crc kubenswrapper[4792]: I0121 18:45:00.266933 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdnpd\" (UniqueName: \"kubernetes.io/projected/67cf2616-18bd-4902-8b71-40e314413b30-kube-api-access-gdnpd\") pod \"collect-profiles-29483685-qzs2g\" (UID: \"67cf2616-18bd-4902-8b71-40e314413b30\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qzs2g"
Jan 21 18:45:00 crc kubenswrapper[4792]: I0121 18:45:00.266974 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/67cf2616-18bd-4902-8b71-40e314413b30-secret-volume\") pod \"collect-profiles-29483685-qzs2g\" (UID: \"67cf2616-18bd-4902-8b71-40e314413b30\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qzs2g"
Jan 21 18:45:00 crc kubenswrapper[4792]: I0121 18:45:00.368827 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/67cf2616-18bd-4902-8b71-40e314413b30-config-volume\") pod \"collect-profiles-29483685-qzs2g\" (UID: \"67cf2616-18bd-4902-8b71-40e314413b30\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qzs2g"
Jan 21 18:45:00 crc kubenswrapper[4792]: I0121 18:45:00.369071 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdnpd\" (UniqueName: \"kubernetes.io/projected/67cf2616-18bd-4902-8b71-40e314413b30-kube-api-access-gdnpd\") pod \"collect-profiles-29483685-qzs2g\" (UID: \"67cf2616-18bd-4902-8b71-40e314413b30\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qzs2g"
Jan 21 18:45:00 crc kubenswrapper[4792]: I0121 18:45:00.370325 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/67cf2616-18bd-4902-8b71-40e314413b30-secret-volume\") pod \"collect-profiles-29483685-qzs2g\" (UID: \"67cf2616-18bd-4902-8b71-40e314413b30\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qzs2g"
Jan 21 18:45:00 crc kubenswrapper[4792]: I0121 18:45:00.372396 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/67cf2616-18bd-4902-8b71-40e314413b30-config-volume\") pod \"collect-profiles-29483685-qzs2g\" (UID: \"67cf2616-18bd-4902-8b71-40e314413b30\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qzs2g"
Jan 21 18:45:00 crc kubenswrapper[4792]: I0121 18:45:00.378810 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/67cf2616-18bd-4902-8b71-40e314413b30-secret-volume\") pod \"collect-profiles-29483685-qzs2g\" (UID: \"67cf2616-18bd-4902-8b71-40e314413b30\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qzs2g"
Jan 21 18:45:00 crc kubenswrapper[4792]: I0121 18:45:00.390926 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdnpd\" (UniqueName: \"kubernetes.io/projected/67cf2616-18bd-4902-8b71-40e314413b30-kube-api-access-gdnpd\") pod \"collect-profiles-29483685-qzs2g\" (UID: \"67cf2616-18bd-4902-8b71-40e314413b30\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qzs2g"
Jan 21 18:45:00 crc kubenswrapper[4792]: I0121 18:45:00.502955 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qzs2g"
Jan 21 18:45:01 crc kubenswrapper[4792]: I0121 18:45:01.623792 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483685-qzs2g"]
Jan 21 18:45:02 crc kubenswrapper[4792]: I0121 18:45:02.494281 4792 generic.go:334] "Generic (PLEG): container finished" podID="67cf2616-18bd-4902-8b71-40e314413b30" containerID="96eecbf6a2a5967bf086acbd94c3a53250872227e4a3eeedf84642fc96627b87" exitCode=0
Jan 21 18:45:02 crc kubenswrapper[4792]: I0121 18:45:02.494689 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qzs2g" event={"ID":"67cf2616-18bd-4902-8b71-40e314413b30","Type":"ContainerDied","Data":"96eecbf6a2a5967bf086acbd94c3a53250872227e4a3eeedf84642fc96627b87"}
Jan 21 18:45:02 crc kubenswrapper[4792]: I0121 18:45:02.494722 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qzs2g" event={"ID":"67cf2616-18bd-4902-8b71-40e314413b30","Type":"ContainerStarted","Data":"9b18f7520bb7d5c3d08c237cd21bf1c1cfd6c0f38a437f0e7a4ebd3d37fad9f2"}
Jan 21 18:45:03 crc kubenswrapper[4792]: I0121 18:45:03.777276 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qzs2g"
Jan 21 18:45:03 crc kubenswrapper[4792]: I0121 18:45:03.940628 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/67cf2616-18bd-4902-8b71-40e314413b30-config-volume\") pod \"67cf2616-18bd-4902-8b71-40e314413b30\" (UID: \"67cf2616-18bd-4902-8b71-40e314413b30\") "
Jan 21 18:45:03 crc kubenswrapper[4792]: I0121 18:45:03.940699 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdnpd\" (UniqueName: \"kubernetes.io/projected/67cf2616-18bd-4902-8b71-40e314413b30-kube-api-access-gdnpd\") pod \"67cf2616-18bd-4902-8b71-40e314413b30\" (UID: \"67cf2616-18bd-4902-8b71-40e314413b30\") "
Jan 21 18:45:03 crc kubenswrapper[4792]: I0121 18:45:03.940721 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/67cf2616-18bd-4902-8b71-40e314413b30-secret-volume\") pod \"67cf2616-18bd-4902-8b71-40e314413b30\" (UID: \"67cf2616-18bd-4902-8b71-40e314413b30\") "
Jan 21 18:45:03 crc kubenswrapper[4792]: I0121 18:45:03.941215 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67cf2616-18bd-4902-8b71-40e314413b30-config-volume" (OuterVolumeSpecName: "config-volume") pod "67cf2616-18bd-4902-8b71-40e314413b30" (UID: "67cf2616-18bd-4902-8b71-40e314413b30"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 18:45:03 crc kubenswrapper[4792]: I0121 18:45:03.942143 4792 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/67cf2616-18bd-4902-8b71-40e314413b30-config-volume\") on node \"crc\" DevicePath \"\""
Jan 21 18:45:03 crc kubenswrapper[4792]: I0121 18:45:03.946452 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67cf2616-18bd-4902-8b71-40e314413b30-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "67cf2616-18bd-4902-8b71-40e314413b30" (UID: "67cf2616-18bd-4902-8b71-40e314413b30"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 18:45:03 crc kubenswrapper[4792]: I0121 18:45:03.947151 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67cf2616-18bd-4902-8b71-40e314413b30-kube-api-access-gdnpd" (OuterVolumeSpecName: "kube-api-access-gdnpd") pod "67cf2616-18bd-4902-8b71-40e314413b30" (UID: "67cf2616-18bd-4902-8b71-40e314413b30"). InnerVolumeSpecName "kube-api-access-gdnpd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 18:45:04 crc kubenswrapper[4792]: I0121 18:45:04.043878 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdnpd\" (UniqueName: \"kubernetes.io/projected/67cf2616-18bd-4902-8b71-40e314413b30-kube-api-access-gdnpd\") on node \"crc\" DevicePath \"\""
Jan 21 18:45:04 crc kubenswrapper[4792]: I0121 18:45:04.043940 4792 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/67cf2616-18bd-4902-8b71-40e314413b30-secret-volume\") on node \"crc\" DevicePath \"\""
Jan 21 18:45:04 crc kubenswrapper[4792]: I0121 18:45:04.513637 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qzs2g" event={"ID":"67cf2616-18bd-4902-8b71-40e314413b30","Type":"ContainerDied","Data":"9b18f7520bb7d5c3d08c237cd21bf1c1cfd6c0f38a437f0e7a4ebd3d37fad9f2"}
Jan 21 18:45:04 crc kubenswrapper[4792]: I0121 18:45:04.513686 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b18f7520bb7d5c3d08c237cd21bf1c1cfd6c0f38a437f0e7a4ebd3d37fad9f2"
Jan 21 18:45:04 crc kubenswrapper[4792]: I0121 18:45:04.513737 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qzs2g"
Jan 21 18:45:04 crc kubenswrapper[4792]: I0121 18:45:04.871804 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483640-crld8"]
Jan 21 18:45:04 crc kubenswrapper[4792]: I0121 18:45:04.881939 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483640-crld8"]
Jan 21 18:45:06 crc kubenswrapper[4792]: I0121 18:45:06.259109 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3790b108-c0de-47f7-af45-05e506227385" path="/var/lib/kubelet/pods/3790b108-c0de-47f7-af45-05e506227385/volumes"
Jan 21 18:45:11 crc kubenswrapper[4792]: I0121 18:45:11.438964 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4jhhr"]
Jan 21 18:45:11 crc kubenswrapper[4792]: E0121 18:45:11.439675 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67cf2616-18bd-4902-8b71-40e314413b30" containerName="collect-profiles"
Jan 21 18:45:11 crc kubenswrapper[4792]: I0121 18:45:11.439983 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="67cf2616-18bd-4902-8b71-40e314413b30" containerName="collect-profiles"
Jan 21 18:45:11 crc kubenswrapper[4792]: I0121 18:45:11.440184 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="67cf2616-18bd-4902-8b71-40e314413b30" containerName="collect-profiles"
Jan 21 18:45:11 crc kubenswrapper[4792]: I0121 18:45:11.441525 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4jhhr"
Jan 21 18:45:11 crc kubenswrapper[4792]: I0121 18:45:11.454277 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4jhhr"]
Jan 21 18:45:11 crc kubenswrapper[4792]: I0121 18:45:11.578311 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5a40d7c-d615-447d-bd6c-b00ec69e1513-catalog-content\") pod \"community-operators-4jhhr\" (UID: \"b5a40d7c-d615-447d-bd6c-b00ec69e1513\") " pod="openshift-marketplace/community-operators-4jhhr"
Jan 21 18:45:11 crc kubenswrapper[4792]: I0121 18:45:11.578427 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xq4z6\" (UniqueName: \"kubernetes.io/projected/b5a40d7c-d615-447d-bd6c-b00ec69e1513-kube-api-access-xq4z6\") pod \"community-operators-4jhhr\" (UID: \"b5a40d7c-d615-447d-bd6c-b00ec69e1513\") " pod="openshift-marketplace/community-operators-4jhhr"
Jan 21 18:45:11 crc kubenswrapper[4792]: I0121 18:45:11.578515 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5a40d7c-d615-447d-bd6c-b00ec69e1513-utilities\") pod \"community-operators-4jhhr\" (UID: \"b5a40d7c-d615-447d-bd6c-b00ec69e1513\") " pod="openshift-marketplace/community-operators-4jhhr"
Jan 21 18:45:11 crc kubenswrapper[4792]: I0121 18:45:11.680118 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xq4z6\" (UniqueName: \"kubernetes.io/projected/b5a40d7c-d615-447d-bd6c-b00ec69e1513-kube-api-access-xq4z6\") pod \"community-operators-4jhhr\" (UID: \"b5a40d7c-d615-447d-bd6c-b00ec69e1513\") " pod="openshift-marketplace/community-operators-4jhhr"
Jan 21 18:45:11 crc kubenswrapper[4792]: I0121 18:45:11.680236 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5a40d7c-d615-447d-bd6c-b00ec69e1513-utilities\") pod \"community-operators-4jhhr\" (UID: \"b5a40d7c-d615-447d-bd6c-b00ec69e1513\") " pod="openshift-marketplace/community-operators-4jhhr"
Jan 21 18:45:11 crc kubenswrapper[4792]: I0121 18:45:11.680444 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5a40d7c-d615-447d-bd6c-b00ec69e1513-catalog-content\") pod \"community-operators-4jhhr\" (UID: \"b5a40d7c-d615-447d-bd6c-b00ec69e1513\") " pod="openshift-marketplace/community-operators-4jhhr"
Jan 21 18:45:11 crc kubenswrapper[4792]: I0121 18:45:11.681103 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5a40d7c-d615-447d-bd6c-b00ec69e1513-utilities\") pod \"community-operators-4jhhr\" (UID: \"b5a40d7c-d615-447d-bd6c-b00ec69e1513\") " pod="openshift-marketplace/community-operators-4jhhr"
Jan 21 18:45:11 crc kubenswrapper[4792]: I0121 18:45:11.681176 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5a40d7c-d615-447d-bd6c-b00ec69e1513-catalog-content\") pod \"community-operators-4jhhr\" (UID: \"b5a40d7c-d615-447d-bd6c-b00ec69e1513\") " pod="openshift-marketplace/community-operators-4jhhr"
Jan 21 18:45:11 crc kubenswrapper[4792]: I0121 18:45:11.717622 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xq4z6\" (UniqueName: \"kubernetes.io/projected/b5a40d7c-d615-447d-bd6c-b00ec69e1513-kube-api-access-xq4z6\") pod \"community-operators-4jhhr\" (UID: \"b5a40d7c-d615-447d-bd6c-b00ec69e1513\") " pod="openshift-marketplace/community-operators-4jhhr"
Jan 21 18:45:11 crc kubenswrapper[4792]: I0121 18:45:11.761817 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4jhhr"
Jan 21 18:45:12 crc kubenswrapper[4792]: I0121 18:45:12.131832 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4jhhr"]
Jan 21 18:45:12 crc kubenswrapper[4792]: I0121 18:45:12.609179 4792 generic.go:334] "Generic (PLEG): container finished" podID="b5a40d7c-d615-447d-bd6c-b00ec69e1513" containerID="9da372274af833b6e53b512eabcdf638fcb190f6deb0c2f5c3fefe42875caf86" exitCode=0
Jan 21 18:45:12 crc kubenswrapper[4792]: I0121 18:45:12.609243 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4jhhr" event={"ID":"b5a40d7c-d615-447d-bd6c-b00ec69e1513","Type":"ContainerDied","Data":"9da372274af833b6e53b512eabcdf638fcb190f6deb0c2f5c3fefe42875caf86"}
Jan 21 18:45:12 crc kubenswrapper[4792]: I0121 18:45:12.609283 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4jhhr" event={"ID":"b5a40d7c-d615-447d-bd6c-b00ec69e1513","Type":"ContainerStarted","Data":"33bf85ab966d45406c985a433476d20ff028bc94fe417dd43e3bdf3f4e145b03"}
Jan 21 18:45:13 crc kubenswrapper[4792]: I0121 18:45:13.624102 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4jhhr" event={"ID":"b5a40d7c-d615-447d-bd6c-b00ec69e1513","Type":"ContainerStarted","Data":"34497705e8f5ff5bf9ea2cc2cfb117a1f652be86d28a75a7e53300e5fbd5e1b5"}
Jan 21 18:45:14 crc kubenswrapper[4792]: I0121 18:45:14.632814 4792 generic.go:334] "Generic (PLEG): container finished" podID="b5a40d7c-d615-447d-bd6c-b00ec69e1513" containerID="34497705e8f5ff5bf9ea2cc2cfb117a1f652be86d28a75a7e53300e5fbd5e1b5" exitCode=0
Jan 21 18:45:14 crc kubenswrapper[4792]: I0121 18:45:14.632896 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4jhhr" event={"ID":"b5a40d7c-d615-447d-bd6c-b00ec69e1513","Type":"ContainerDied","Data":"34497705e8f5ff5bf9ea2cc2cfb117a1f652be86d28a75a7e53300e5fbd5e1b5"}
Jan 21 18:45:15 crc kubenswrapper[4792]: I0121 18:45:15.432124 4792 scope.go:117] "RemoveContainer" containerID="fd6a74f940f0f3eaf7cf372679f19728e66d8b68a4628f855d2a9b9cf417f597"
Jan 21 18:45:15 crc kubenswrapper[4792]: I0121 18:45:15.653393 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4jhhr" event={"ID":"b5a40d7c-d615-447d-bd6c-b00ec69e1513","Type":"ContainerStarted","Data":"36c64af6c1d9587dfdfa4d50526dc3aef9e18d209018fc94a83cd123490a8823"}
Jan 21 18:45:15 crc kubenswrapper[4792]: I0121 18:45:15.684514 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4jhhr" podStartSLOduration=2.24464126 podStartE2EDuration="4.684488761s" podCreationTimestamp="2026-01-21 18:45:11 +0000 UTC" firstStartedPulling="2026-01-21 18:45:12.616865891 +0000 UTC m=+2946.598829077" lastFinishedPulling="2026-01-21 18:45:15.056713392 +0000 UTC m=+2949.038676578" observedRunningTime="2026-01-21 18:45:15.679027706 +0000 UTC m=+2949.660990922" watchObservedRunningTime="2026-01-21 18:45:15.684488761 +0000 UTC m=+2949.666451957"
Jan 21 18:45:21 crc kubenswrapper[4792]: I0121 18:45:21.762740 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4jhhr"
Jan 21 18:45:21 crc kubenswrapper[4792]: I0121 18:45:21.763332 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4jhhr"
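[editor's note] The startup-latency entry above is internally consistent: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp (18:45:15.684488761 - 18:45:11 = 4.684488761s), and podStartSLOduration appears to exclude the image-pull window (4.684488761s - (18:45:15.056713392 - 18:45:12.616865891) = 2.24464126s). A sketch reproducing the arithmetic, assuming that subtraction is all the tracker does here:

package main

import (
	"fmt"
	"time"
)

func main() {
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2026-01-21 18:45:11 +0000 UTC")
	firstPull := parse("2026-01-21 18:45:12.616865891 +0000 UTC")
	lastPull := parse("2026-01-21 18:45:15.056713392 +0000 UTC")
	running := parse("2026-01-21 18:45:15.684488761 +0000 UTC")

	e2e := running.Sub(created)          // podStartE2EDuration: 4.684488761s
	slo := e2e - lastPull.Sub(firstPull) // podStartSLOduration: 2.24464126s
	fmt.Println(e2e, slo)
}

The same arithmetic checks out for the certified-operators-zwbm5 entry later in this log (9.435586035s - 7.490832004s = 1.944754031s).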
pod="openshift-marketplace/community-operators-4jhhr" Jan 21 18:45:21 crc kubenswrapper[4792]: I0121 18:45:21.807626 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4jhhr" Jan 21 18:45:21 crc kubenswrapper[4792]: I0121 18:45:21.868824 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4jhhr" Jan 21 18:45:22 crc kubenswrapper[4792]: I0121 18:45:22.052870 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4jhhr"] Jan 21 18:45:23 crc kubenswrapper[4792]: I0121 18:45:23.570970 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:45:23 crc kubenswrapper[4792]: I0121 18:45:23.571777 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:45:23 crc kubenswrapper[4792]: I0121 18:45:23.799545 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4jhhr" podUID="b5a40d7c-d615-447d-bd6c-b00ec69e1513" containerName="registry-server" containerID="cri-o://36c64af6c1d9587dfdfa4d50526dc3aef9e18d209018fc94a83cd123490a8823" gracePeriod=2 Jan 21 18:45:24 crc kubenswrapper[4792]: I0121 18:45:24.713459 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4jhhr" Jan 21 18:45:24 crc kubenswrapper[4792]: I0121 18:45:24.808805 4792 generic.go:334] "Generic (PLEG): container finished" podID="b5a40d7c-d615-447d-bd6c-b00ec69e1513" containerID="36c64af6c1d9587dfdfa4d50526dc3aef9e18d209018fc94a83cd123490a8823" exitCode=0 Jan 21 18:45:24 crc kubenswrapper[4792]: I0121 18:45:24.808892 4792 util.go:48] "No ready sandbox for pod can be found. 
Jan 21 18:45:24 crc kubenswrapper[4792]: I0121 18:45:24.808913 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4jhhr" event={"ID":"b5a40d7c-d615-447d-bd6c-b00ec69e1513","Type":"ContainerDied","Data":"36c64af6c1d9587dfdfa4d50526dc3aef9e18d209018fc94a83cd123490a8823"}
Jan 21 18:45:24 crc kubenswrapper[4792]: I0121 18:45:24.809036 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4jhhr" event={"ID":"b5a40d7c-d615-447d-bd6c-b00ec69e1513","Type":"ContainerDied","Data":"33bf85ab966d45406c985a433476d20ff028bc94fe417dd43e3bdf3f4e145b03"}
Jan 21 18:45:24 crc kubenswrapper[4792]: I0121 18:45:24.809064 4792 scope.go:117] "RemoveContainer" containerID="36c64af6c1d9587dfdfa4d50526dc3aef9e18d209018fc94a83cd123490a8823"
Jan 21 18:45:24 crc kubenswrapper[4792]: I0121 18:45:24.812900 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xq4z6\" (UniqueName: \"kubernetes.io/projected/b5a40d7c-d615-447d-bd6c-b00ec69e1513-kube-api-access-xq4z6\") pod \"b5a40d7c-d615-447d-bd6c-b00ec69e1513\" (UID: \"b5a40d7c-d615-447d-bd6c-b00ec69e1513\") "
Jan 21 18:45:24 crc kubenswrapper[4792]: I0121 18:45:24.812999 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5a40d7c-d615-447d-bd6c-b00ec69e1513-catalog-content\") pod \"b5a40d7c-d615-447d-bd6c-b00ec69e1513\" (UID: \"b5a40d7c-d615-447d-bd6c-b00ec69e1513\") "
Jan 21 18:45:24 crc kubenswrapper[4792]: I0121 18:45:24.813172 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5a40d7c-d615-447d-bd6c-b00ec69e1513-utilities\") pod \"b5a40d7c-d615-447d-bd6c-b00ec69e1513\" (UID: \"b5a40d7c-d615-447d-bd6c-b00ec69e1513\") "
Jan 21 18:45:24 crc kubenswrapper[4792]: I0121 18:45:24.814555 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5a40d7c-d615-447d-bd6c-b00ec69e1513-utilities" (OuterVolumeSpecName: "utilities") pod "b5a40d7c-d615-447d-bd6c-b00ec69e1513" (UID: "b5a40d7c-d615-447d-bd6c-b00ec69e1513"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 21 18:45:24 crc kubenswrapper[4792]: I0121 18:45:24.822159 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5a40d7c-d615-447d-bd6c-b00ec69e1513-kube-api-access-xq4z6" (OuterVolumeSpecName: "kube-api-access-xq4z6") pod "b5a40d7c-d615-447d-bd6c-b00ec69e1513" (UID: "b5a40d7c-d615-447d-bd6c-b00ec69e1513"). InnerVolumeSpecName "kube-api-access-xq4z6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 18:45:24 crc kubenswrapper[4792]: I0121 18:45:24.832519 4792 scope.go:117] "RemoveContainer" containerID="34497705e8f5ff5bf9ea2cc2cfb117a1f652be86d28a75a7e53300e5fbd5e1b5"
Jan 21 18:45:24 crc kubenswrapper[4792]: I0121 18:45:24.859418 4792 scope.go:117] "RemoveContainer" containerID="9da372274af833b6e53b512eabcdf638fcb190f6deb0c2f5c3fefe42875caf86"
Jan 21 18:45:24 crc kubenswrapper[4792]: I0121 18:45:24.879592 4792 scope.go:117] "RemoveContainer" containerID="36c64af6c1d9587dfdfa4d50526dc3aef9e18d209018fc94a83cd123490a8823"
Jan 21 18:45:24 crc kubenswrapper[4792]: I0121 18:45:24.880115 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5a40d7c-d615-447d-bd6c-b00ec69e1513-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b5a40d7c-d615-447d-bd6c-b00ec69e1513" (UID: "b5a40d7c-d615-447d-bd6c-b00ec69e1513"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 21 18:45:24 crc kubenswrapper[4792]: E0121 18:45:24.880121 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36c64af6c1d9587dfdfa4d50526dc3aef9e18d209018fc94a83cd123490a8823\": container with ID starting with 36c64af6c1d9587dfdfa4d50526dc3aef9e18d209018fc94a83cd123490a8823 not found: ID does not exist" containerID="36c64af6c1d9587dfdfa4d50526dc3aef9e18d209018fc94a83cd123490a8823"
Jan 21 18:45:24 crc kubenswrapper[4792]: I0121 18:45:24.880563 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36c64af6c1d9587dfdfa4d50526dc3aef9e18d209018fc94a83cd123490a8823"} err="failed to get container status \"36c64af6c1d9587dfdfa4d50526dc3aef9e18d209018fc94a83cd123490a8823\": rpc error: code = NotFound desc = could not find container \"36c64af6c1d9587dfdfa4d50526dc3aef9e18d209018fc94a83cd123490a8823\": container with ID starting with 36c64af6c1d9587dfdfa4d50526dc3aef9e18d209018fc94a83cd123490a8823 not found: ID does not exist"
Jan 21 18:45:24 crc kubenswrapper[4792]: I0121 18:45:24.880612 4792 scope.go:117] "RemoveContainer" containerID="34497705e8f5ff5bf9ea2cc2cfb117a1f652be86d28a75a7e53300e5fbd5e1b5"
Jan 21 18:45:24 crc kubenswrapper[4792]: E0121 18:45:24.881245 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34497705e8f5ff5bf9ea2cc2cfb117a1f652be86d28a75a7e53300e5fbd5e1b5\": container with ID starting with 34497705e8f5ff5bf9ea2cc2cfb117a1f652be86d28a75a7e53300e5fbd5e1b5 not found: ID does not exist" containerID="34497705e8f5ff5bf9ea2cc2cfb117a1f652be86d28a75a7e53300e5fbd5e1b5"
Jan 21 18:45:24 crc kubenswrapper[4792]: I0121 18:45:24.881285 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34497705e8f5ff5bf9ea2cc2cfb117a1f652be86d28a75a7e53300e5fbd5e1b5"} err="failed to get container status \"34497705e8f5ff5bf9ea2cc2cfb117a1f652be86d28a75a7e53300e5fbd5e1b5\": rpc error: code = NotFound desc = could not find container \"34497705e8f5ff5bf9ea2cc2cfb117a1f652be86d28a75a7e53300e5fbd5e1b5\": container with ID starting with 34497705e8f5ff5bf9ea2cc2cfb117a1f652be86d28a75a7e53300e5fbd5e1b5 not found: ID does not exist"
Jan 21 18:45:24 crc kubenswrapper[4792]: I0121 18:45:24.881315 4792 scope.go:117] "RemoveContainer" containerID="9da372274af833b6e53b512eabcdf638fcb190f6deb0c2f5c3fefe42875caf86"
Jan 21 18:45:24 crc kubenswrapper[4792]: E0121 18:45:24.881696 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9da372274af833b6e53b512eabcdf638fcb190f6deb0c2f5c3fefe42875caf86\": container with ID starting with 9da372274af833b6e53b512eabcdf638fcb190f6deb0c2f5c3fefe42875caf86 not found: ID does not exist" containerID="9da372274af833b6e53b512eabcdf638fcb190f6deb0c2f5c3fefe42875caf86"
Jan 21 18:45:24 crc kubenswrapper[4792]: I0121 18:45:24.881764 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9da372274af833b6e53b512eabcdf638fcb190f6deb0c2f5c3fefe42875caf86"} err="failed to get container status \"9da372274af833b6e53b512eabcdf638fcb190f6deb0c2f5c3fefe42875caf86\": rpc error: code = NotFound desc = could not find container \"9da372274af833b6e53b512eabcdf638fcb190f6deb0c2f5c3fefe42875caf86\": container with ID starting with 9da372274af833b6e53b512eabcdf638fcb190f6deb0c2f5c3fefe42875caf86 not found: ID does not exist"
Jan 21 18:45:24 crc kubenswrapper[4792]: I0121 18:45:24.914911 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5a40d7c-d615-447d-bd6c-b00ec69e1513-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 21 18:45:24 crc kubenswrapper[4792]: I0121 18:45:24.914961 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5a40d7c-d615-447d-bd6c-b00ec69e1513-utilities\") on node \"crc\" DevicePath \"\""
Jan 21 18:45:24 crc kubenswrapper[4792]: I0121 18:45:24.914972 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xq4z6\" (UniqueName: \"kubernetes.io/projected/b5a40d7c-d615-447d-bd6c-b00ec69e1513-kube-api-access-xq4z6\") on node \"crc\" DevicePath \"\""
Jan 21 18:45:25 crc kubenswrapper[4792]: I0121 18:45:25.139591 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4jhhr"]
Jan 21 18:45:25 crc kubenswrapper[4792]: I0121 18:45:25.146518 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4jhhr"]
Jan 21 18:45:26 crc kubenswrapper[4792]: I0121 18:45:26.255172 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5a40d7c-d615-447d-bd6c-b00ec69e1513" path="/var/lib/kubelet/pods/b5a40d7c-d615-447d-bd6c-b00ec69e1513/volumes"
Jan 21 18:45:53 crc kubenswrapper[4792]: I0121 18:45:53.572947 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 21 18:45:53 crc kubenswrapper[4792]: I0121 18:45:53.573470 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 21 18:45:53 crc kubenswrapper[4792]: I0121 18:45:53.573758 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x"
Jan 21 18:45:53 crc kubenswrapper[4792]: I0121 18:45:53.574873 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6"} pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
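[editor's note] The "ContainerStatus from runtime service failed ... NotFound" errors above are a benign race: the kubelet re-queries containers it has just removed, and CRI-O answers over gRPC with code NotFound. A sketch of how a CRI client can classify that case using the gRPC status package (an illustrative assumption, not kubelet's actual handling):

package main

import (
	"errors"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// alreadyGone reports whether err is a gRPC NotFound, i.e. the container
// was removed before the status query landed.
func alreadyGone(err error) bool {
	s, ok := status.FromError(err)
	return ok && s.Code() == codes.NotFound
}

func main() {
	gone := status.Error(codes.NotFound, "could not find container")
	fmt.Println(alreadyGone(gone))                     // true: safe to ignore
	fmt.Println(alreadyGone(errors.New("io timeout"))) // false: a real failure
}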
Jan 21 18:45:53 crc kubenswrapper[4792]: I0121 18:45:53.574977 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" containerID="cri-o://8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6" gracePeriod=600
Jan 21 18:45:53 crc kubenswrapper[4792]: E0121 18:45:53.702985 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:45:54 crc kubenswrapper[4792]: I0121 18:45:54.083227 4792 generic.go:334] "Generic (PLEG): container finished" podID="759f2e21-e44e-4049-b262-cb49448e22ab" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6" exitCode=0
Jan 21 18:45:54 crc kubenswrapper[4792]: I0121 18:45:54.083276 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerDied","Data":"8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6"}
Jan 21 18:45:54 crc kubenswrapper[4792]: I0121 18:45:54.083317 4792 scope.go:117] "RemoveContainer" containerID="a7db2c6b03ac8a6b13df7083f8fac8d0c267c31933f69a8328568ea96affff48"
Jan 21 18:45:54 crc kubenswrapper[4792]: I0121 18:45:54.084452 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6"
Jan 21 18:45:54 crc kubenswrapper[4792]: E0121 18:45:54.086675 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:46:09 crc kubenswrapper[4792]: I0121 18:46:09.290637 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6"
Jan 21 18:46:09 crc kubenswrapper[4792]: E0121 18:46:09.291485 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:46:20 crc kubenswrapper[4792]: I0121 18:46:20.246775 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6"
Jan 21 18:46:20 crc kubenswrapper[4792]: E0121 18:46:20.247581 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:46:34 crc kubenswrapper[4792]: I0121 18:46:34.247697 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6"
Jan 21 18:46:34 crc kubenswrapper[4792]: E0121 18:46:34.248741 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:46:48 crc kubenswrapper[4792]: I0121 18:46:48.246119 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6"
Jan 21 18:46:48 crc kubenswrapper[4792]: E0121 18:46:48.248166 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:47:00 crc kubenswrapper[4792]: I0121 18:47:00.247376 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6"
Jan 21 18:47:00 crc kubenswrapper[4792]: E0121 18:47:00.248207 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:47:13 crc kubenswrapper[4792]: I0121 18:47:13.247115 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6"
Jan 21 18:47:13 crc kubenswrapper[4792]: E0121 18:47:13.248535 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:47:28 crc kubenswrapper[4792]: I0121 18:47:28.246259 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6"
Jan 21 18:47:28 crc kubenswrapper[4792]: E0121 18:47:28.247656 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:47:43 crc kubenswrapper[4792]: I0121 18:47:43.247582 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6"
Jan 21 18:47:43 crc kubenswrapper[4792]: E0121 18:47:43.248841 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:47:55 crc kubenswrapper[4792]: I0121 18:47:55.246704 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6"
Jan 21 18:47:55 crc kubenswrapper[4792]: E0121 18:47:55.247527 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:48:10 crc kubenswrapper[4792]: I0121 18:48:10.246744 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6"
Jan 21 18:48:10 crc kubenswrapper[4792]: E0121 18:48:10.248598 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:48:14 crc kubenswrapper[4792]: I0121 18:48:14.286326 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zwbm5"]
Jan 21 18:48:14 crc kubenswrapper[4792]: E0121 18:48:14.287439 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5a40d7c-d615-447d-bd6c-b00ec69e1513" containerName="extract-content"
Jan 21 18:48:14 crc kubenswrapper[4792]: I0121 18:48:14.287457 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5a40d7c-d615-447d-bd6c-b00ec69e1513" containerName="extract-content"
Jan 21 18:48:14 crc kubenswrapper[4792]: E0121 18:48:14.287473 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5a40d7c-d615-447d-bd6c-b00ec69e1513" containerName="extract-utilities"
Jan 21 18:48:14 crc kubenswrapper[4792]: I0121 18:48:14.287482 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5a40d7c-d615-447d-bd6c-b00ec69e1513" containerName="extract-utilities"
Jan 21 18:48:14 crc kubenswrapper[4792]: E0121 18:48:14.287502 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5a40d7c-d615-447d-bd6c-b00ec69e1513" containerName="registry-server"
Jan 21 18:48:14 crc kubenswrapper[4792]: I0121 18:48:14.287510 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5a40d7c-d615-447d-bd6c-b00ec69e1513" containerName="registry-server"
Jan 21 18:48:14 crc kubenswrapper[4792]: I0121 18:48:14.287714 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5a40d7c-d615-447d-bd6c-b00ec69e1513" containerName="registry-server"
Jan 21 18:48:14 crc kubenswrapper[4792]: I0121 18:48:14.288984 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zwbm5"
Jan 21 18:48:14 crc kubenswrapper[4792]: I0121 18:48:14.291460 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zwbm5"]
Jan 21 18:48:14 crc kubenswrapper[4792]: I0121 18:48:14.449101 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abb6aca9-e976-4b59-98f5-9136676be7ff-utilities\") pod \"certified-operators-zwbm5\" (UID: \"abb6aca9-e976-4b59-98f5-9136676be7ff\") " pod="openshift-marketplace/certified-operators-zwbm5"
Jan 21 18:48:14 crc kubenswrapper[4792]: I0121 18:48:14.449160 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwms7\" (UniqueName: \"kubernetes.io/projected/abb6aca9-e976-4b59-98f5-9136676be7ff-kube-api-access-jwms7\") pod \"certified-operators-zwbm5\" (UID: \"abb6aca9-e976-4b59-98f5-9136676be7ff\") " pod="openshift-marketplace/certified-operators-zwbm5"
Jan 21 18:48:14 crc kubenswrapper[4792]: I0121 18:48:14.449253 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abb6aca9-e976-4b59-98f5-9136676be7ff-catalog-content\") pod \"certified-operators-zwbm5\" (UID: \"abb6aca9-e976-4b59-98f5-9136676be7ff\") " pod="openshift-marketplace/certified-operators-zwbm5"
Jan 21 18:48:14 crc kubenswrapper[4792]: I0121 18:48:14.550989 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abb6aca9-e976-4b59-98f5-9136676be7ff-utilities\") pod \"certified-operators-zwbm5\" (UID: \"abb6aca9-e976-4b59-98f5-9136676be7ff\") " pod="openshift-marketplace/certified-operators-zwbm5"
Jan 21 18:48:14 crc kubenswrapper[4792]: I0121 18:48:14.551051 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwms7\" (UniqueName: \"kubernetes.io/projected/abb6aca9-e976-4b59-98f5-9136676be7ff-kube-api-access-jwms7\") pod \"certified-operators-zwbm5\" (UID: \"abb6aca9-e976-4b59-98f5-9136676be7ff\") " pod="openshift-marketplace/certified-operators-zwbm5"
Jan 21 18:48:14 crc kubenswrapper[4792]: I0121 18:48:14.551106 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abb6aca9-e976-4b59-98f5-9136676be7ff-catalog-content\") pod \"certified-operators-zwbm5\" (UID: \"abb6aca9-e976-4b59-98f5-9136676be7ff\") " pod="openshift-marketplace/certified-operators-zwbm5"
Jan 21 18:48:14 crc kubenswrapper[4792]: I0121 18:48:14.551621 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abb6aca9-e976-4b59-98f5-9136676be7ff-utilities\") pod \"certified-operators-zwbm5\" (UID: \"abb6aca9-e976-4b59-98f5-9136676be7ff\") " pod="openshift-marketplace/certified-operators-zwbm5"
Jan 21 18:48:14 crc kubenswrapper[4792]: I0121 18:48:14.551634 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abb6aca9-e976-4b59-98f5-9136676be7ff-catalog-content\") pod \"certified-operators-zwbm5\" (UID: \"abb6aca9-e976-4b59-98f5-9136676be7ff\") " pod="openshift-marketplace/certified-operators-zwbm5"
Jan 21 18:48:14 crc kubenswrapper[4792]: I0121 18:48:14.573715 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwms7\" (UniqueName: \"kubernetes.io/projected/abb6aca9-e976-4b59-98f5-9136676be7ff-kube-api-access-jwms7\") pod \"certified-operators-zwbm5\" (UID: \"abb6aca9-e976-4b59-98f5-9136676be7ff\") " pod="openshift-marketplace/certified-operators-zwbm5"
Jan 21 18:48:14 crc kubenswrapper[4792]: I0121 18:48:14.614251 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zwbm5"
Jan 21 18:48:14 crc kubenswrapper[4792]: I0121 18:48:14.899192 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zwbm5"]
Jan 21 18:48:15 crc kubenswrapper[4792]: I0121 18:48:15.344665 4792 generic.go:334] "Generic (PLEG): container finished" podID="abb6aca9-e976-4b59-98f5-9136676be7ff" containerID="54d1cd958080b74408c89e5b89acc5c27c38138b7fc472e71695f0bfcfd27ac3" exitCode=0
Jan 21 18:48:15 crc kubenswrapper[4792]: I0121 18:48:15.344974 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zwbm5" event={"ID":"abb6aca9-e976-4b59-98f5-9136676be7ff","Type":"ContainerDied","Data":"54d1cd958080b74408c89e5b89acc5c27c38138b7fc472e71695f0bfcfd27ac3"}
Jan 21 18:48:15 crc kubenswrapper[4792]: I0121 18:48:15.345072 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zwbm5" event={"ID":"abb6aca9-e976-4b59-98f5-9136676be7ff","Type":"ContainerStarted","Data":"43f3e51f9f5375c6eb6fd33e1710c625bc72240fd19250a99d0cf3b433b06ece"}
Jan 21 18:48:21 crc kubenswrapper[4792]: I0121 18:48:21.393009 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zwbm5" event={"ID":"abb6aca9-e976-4b59-98f5-9136676be7ff","Type":"ContainerStarted","Data":"8a9a2add7a33d167aa8422afd66a80ab45a78d32bd11d0e26a8957fe98c089ad"}
Jan 21 18:48:22 crc kubenswrapper[4792]: I0121 18:48:22.246997 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6"
Jan 21 18:48:22 crc kubenswrapper[4792]: E0121 18:48:22.247284 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:48:22 crc kubenswrapper[4792]: I0121 18:48:22.402544 4792 generic.go:334] "Generic (PLEG): container finished" podID="abb6aca9-e976-4b59-98f5-9136676be7ff" containerID="8a9a2add7a33d167aa8422afd66a80ab45a78d32bd11d0e26a8957fe98c089ad" exitCode=0
Jan 21 18:48:22 crc kubenswrapper[4792]: I0121 18:48:22.402587 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zwbm5" event={"ID":"abb6aca9-e976-4b59-98f5-9136676be7ff","Type":"ContainerDied","Data":"8a9a2add7a33d167aa8422afd66a80ab45a78d32bd11d0e26a8957fe98c089ad"}
Jan 21 18:48:23 crc kubenswrapper[4792]: I0121 18:48:23.413538 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zwbm5" event={"ID":"abb6aca9-e976-4b59-98f5-9136676be7ff","Type":"ContainerStarted","Data":"0c2ea7ff8966991c680ab0ff6d386fc3c4914bf043da4759b82c09caec5f23ab"}
Jan 21 18:48:23 crc kubenswrapper[4792]: I0121 18:48:23.435612 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zwbm5" podStartSLOduration=1.944754031 podStartE2EDuration="9.435586035s" podCreationTimestamp="2026-01-21 18:48:14 +0000 UTC" firstStartedPulling="2026-01-21 18:48:15.347102118 +0000 UTC m=+3129.329065304" lastFinishedPulling="2026-01-21 18:48:22.837934122 +0000 UTC m=+3136.819897308" observedRunningTime="2026-01-21 18:48:23.432276877 +0000 UTC m=+3137.414240053" watchObservedRunningTime="2026-01-21 18:48:23.435586035 +0000 UTC m=+3137.417549221"
Jan 21 18:48:24 crc kubenswrapper[4792]: I0121 18:48:24.615427 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zwbm5"
Jan 21 18:48:24 crc kubenswrapper[4792]: I0121 18:48:24.615507 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zwbm5"
Jan 21 18:48:24 crc kubenswrapper[4792]: I0121 18:48:24.657168 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zwbm5"
Jan 21 18:48:34 crc kubenswrapper[4792]: I0121 18:48:34.664683 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zwbm5"
Jan 21 18:48:34 crc kubenswrapper[4792]: I0121 18:48:34.721083 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zwbm5"]
Jan 21 18:48:35 crc kubenswrapper[4792]: I0121 18:48:35.504272 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zwbm5" podUID="abb6aca9-e976-4b59-98f5-9136676be7ff" containerName="registry-server" containerID="cri-o://0c2ea7ff8966991c680ab0ff6d386fc3c4914bf043da4759b82c09caec5f23ab" gracePeriod=2
Jan 21 18:48:35 crc kubenswrapper[4792]: I0121 18:48:35.890057 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zwbm5"
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.038304 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jwms7\" (UniqueName: \"kubernetes.io/projected/abb6aca9-e976-4b59-98f5-9136676be7ff-kube-api-access-jwms7\") pod \"abb6aca9-e976-4b59-98f5-9136676be7ff\" (UID: \"abb6aca9-e976-4b59-98f5-9136676be7ff\") "
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.038500 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abb6aca9-e976-4b59-98f5-9136676be7ff-utilities\") pod \"abb6aca9-e976-4b59-98f5-9136676be7ff\" (UID: \"abb6aca9-e976-4b59-98f5-9136676be7ff\") "
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.038616 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abb6aca9-e976-4b59-98f5-9136676be7ff-catalog-content\") pod \"abb6aca9-e976-4b59-98f5-9136676be7ff\" (UID: \"abb6aca9-e976-4b59-98f5-9136676be7ff\") "
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.044867 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abb6aca9-e976-4b59-98f5-9136676be7ff-kube-api-access-jwms7" (OuterVolumeSpecName: "kube-api-access-jwms7") pod "abb6aca9-e976-4b59-98f5-9136676be7ff" (UID: "abb6aca9-e976-4b59-98f5-9136676be7ff"). InnerVolumeSpecName "kube-api-access-jwms7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.061116 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abb6aca9-e976-4b59-98f5-9136676be7ff-utilities" (OuterVolumeSpecName: "utilities") pod "abb6aca9-e976-4b59-98f5-9136676be7ff" (UID: "abb6aca9-e976-4b59-98f5-9136676be7ff"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.098873 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abb6aca9-e976-4b59-98f5-9136676be7ff-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "abb6aca9-e976-4b59-98f5-9136676be7ff" (UID: "abb6aca9-e976-4b59-98f5-9136676be7ff"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.140474 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abb6aca9-e976-4b59-98f5-9136676be7ff-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.140518 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jwms7\" (UniqueName: \"kubernetes.io/projected/abb6aca9-e976-4b59-98f5-9136676be7ff-kube-api-access-jwms7\") on node \"crc\" DevicePath \"\""
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.140531 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abb6aca9-e976-4b59-98f5-9136676be7ff-utilities\") on node \"crc\" DevicePath \"\""
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.517210 4792 generic.go:334] "Generic (PLEG): container finished" podID="abb6aca9-e976-4b59-98f5-9136676be7ff" containerID="0c2ea7ff8966991c680ab0ff6d386fc3c4914bf043da4759b82c09caec5f23ab" exitCode=0
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.517289 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zwbm5" event={"ID":"abb6aca9-e976-4b59-98f5-9136676be7ff","Type":"ContainerDied","Data":"0c2ea7ff8966991c680ab0ff6d386fc3c4914bf043da4759b82c09caec5f23ab"}
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.517317 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zwbm5"
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.517338 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zwbm5" event={"ID":"abb6aca9-e976-4b59-98f5-9136676be7ff","Type":"ContainerDied","Data":"43f3e51f9f5375c6eb6fd33e1710c625bc72240fd19250a99d0cf3b433b06ece"}
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.517366 4792 scope.go:117] "RemoveContainer" containerID="0c2ea7ff8966991c680ab0ff6d386fc3c4914bf043da4759b82c09caec5f23ab"
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.556144 4792 scope.go:117] "RemoveContainer" containerID="8a9a2add7a33d167aa8422afd66a80ab45a78d32bd11d0e26a8957fe98c089ad"
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.556427 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zwbm5"]
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.564706 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zwbm5"]
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.578941 4792 scope.go:117] "RemoveContainer" containerID="54d1cd958080b74408c89e5b89acc5c27c38138b7fc472e71695f0bfcfd27ac3"
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.616289 4792 scope.go:117] "RemoveContainer" containerID="0c2ea7ff8966991c680ab0ff6d386fc3c4914bf043da4759b82c09caec5f23ab"
Jan 21 18:48:36 crc kubenswrapper[4792]: E0121 18:48:36.616794 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c2ea7ff8966991c680ab0ff6d386fc3c4914bf043da4759b82c09caec5f23ab\": container with ID starting with 0c2ea7ff8966991c680ab0ff6d386fc3c4914bf043da4759b82c09caec5f23ab not found: ID does not exist" containerID="0c2ea7ff8966991c680ab0ff6d386fc3c4914bf043da4759b82c09caec5f23ab"
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.616828 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c2ea7ff8966991c680ab0ff6d386fc3c4914bf043da4759b82c09caec5f23ab"} err="failed to get container status \"0c2ea7ff8966991c680ab0ff6d386fc3c4914bf043da4759b82c09caec5f23ab\": rpc error: code = NotFound desc = could not find container \"0c2ea7ff8966991c680ab0ff6d386fc3c4914bf043da4759b82c09caec5f23ab\": container with ID starting with 0c2ea7ff8966991c680ab0ff6d386fc3c4914bf043da4759b82c09caec5f23ab not found: ID does not exist"
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.616867 4792 scope.go:117] "RemoveContainer" containerID="8a9a2add7a33d167aa8422afd66a80ab45a78d32bd11d0e26a8957fe98c089ad"
Jan 21 18:48:36 crc kubenswrapper[4792]: E0121 18:48:36.617395 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a9a2add7a33d167aa8422afd66a80ab45a78d32bd11d0e26a8957fe98c089ad\": container with ID starting with 8a9a2add7a33d167aa8422afd66a80ab45a78d32bd11d0e26a8957fe98c089ad not found: ID does not exist" containerID="8a9a2add7a33d167aa8422afd66a80ab45a78d32bd11d0e26a8957fe98c089ad"
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.617480 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a9a2add7a33d167aa8422afd66a80ab45a78d32bd11d0e26a8957fe98c089ad"} err="failed to get container status \"8a9a2add7a33d167aa8422afd66a80ab45a78d32bd11d0e26a8957fe98c089ad\": rpc error: code = NotFound desc = could not find container \"8a9a2add7a33d167aa8422afd66a80ab45a78d32bd11d0e26a8957fe98c089ad\": container with ID starting with 8a9a2add7a33d167aa8422afd66a80ab45a78d32bd11d0e26a8957fe98c089ad not found: ID does not exist"
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.617532 4792 scope.go:117] "RemoveContainer" containerID="54d1cd958080b74408c89e5b89acc5c27c38138b7fc472e71695f0bfcfd27ac3"
Jan 21 18:48:36 crc kubenswrapper[4792]: E0121 18:48:36.618367 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54d1cd958080b74408c89e5b89acc5c27c38138b7fc472e71695f0bfcfd27ac3\": container with ID starting with 54d1cd958080b74408c89e5b89acc5c27c38138b7fc472e71695f0bfcfd27ac3 not found: ID does not exist" containerID="54d1cd958080b74408c89e5b89acc5c27c38138b7fc472e71695f0bfcfd27ac3"
Jan 21 18:48:36 crc kubenswrapper[4792]: I0121 18:48:36.618410 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54d1cd958080b74408c89e5b89acc5c27c38138b7fc472e71695f0bfcfd27ac3"} err="failed to get container status \"54d1cd958080b74408c89e5b89acc5c27c38138b7fc472e71695f0bfcfd27ac3\": rpc error: code = NotFound desc = could not find container \"54d1cd958080b74408c89e5b89acc5c27c38138b7fc472e71695f0bfcfd27ac3\": container with ID starting with 54d1cd958080b74408c89e5b89acc5c27c38138b7fc472e71695f0bfcfd27ac3 not found: ID does not exist"
Jan 21 18:48:37 crc kubenswrapper[4792]: I0121 18:48:37.246923 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6"
Jan 21 18:48:37 crc kubenswrapper[4792]: E0121 18:48:37.247559 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:48:38 crc kubenswrapper[4792]: I0121 18:48:38.256611 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abb6aca9-e976-4b59-98f5-9136676be7ff" path="/var/lib/kubelet/pods/abb6aca9-e976-4b59-98f5-9136676be7ff/volumes"
Jan 21 18:48:49 crc kubenswrapper[4792]: I0121 18:48:49.247624 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6"
Jan 21 18:48:49 crc kubenswrapper[4792]: E0121 18:48:49.248408 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:49:04 crc kubenswrapper[4792]: I0121 18:49:04.246258 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6"
Jan 21 18:49:04 crc kubenswrapper[4792]: E0121 18:49:04.247036 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:49:15 crc kubenswrapper[4792]: I0121 18:49:15.247152 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6"
Jan 21 18:49:15 crc kubenswrapper[4792]: E0121 18:49:15.248064 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:49:29 crc kubenswrapper[4792]: I0121 18:49:29.246790 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6"
Jan 21 18:49:29 crc kubenswrapper[4792]: E0121 18:49:29.247492 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:49:43 crc kubenswrapper[4792]: I0121 18:49:43.246600 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6"
Jan 21 18:49:43 crc kubenswrapper[4792]: E0121 18:49:43.247487 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:49:52 crc kubenswrapper[4792]: I0121 18:49:52.842941 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-framework-operators-pzfzf"]
Jan 21 18:49:52 crc kubenswrapper[4792]: E0121 18:49:52.844046 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abb6aca9-e976-4b59-98f5-9136676be7ff" containerName="extract-utilities"
Jan 21 18:49:52 crc kubenswrapper[4792]: I0121 18:49:52.844065 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="abb6aca9-e976-4b59-98f5-9136676be7ff" containerName="extract-utilities"
Jan 21 18:49:52 crc kubenswrapper[4792]: E0121 18:49:52.844075 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abb6aca9-e976-4b59-98f5-9136676be7ff" containerName="registry-server"
Jan 21 18:49:52 crc kubenswrapper[4792]: I0121 18:49:52.844081 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="abb6aca9-e976-4b59-98f5-9136676be7ff" containerName="registry-server"
Jan 21 18:49:52 crc kubenswrapper[4792]: E0121 18:49:52.844090 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abb6aca9-e976-4b59-98f5-9136676be7ff" containerName="extract-content"
Jan 21 18:49:52 crc kubenswrapper[4792]: I0121 18:49:52.844096 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="abb6aca9-e976-4b59-98f5-9136676be7ff" containerName="extract-content"
Jan 21 18:49:52 crc kubenswrapper[4792]: I0121 18:49:52.844223 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="abb6aca9-e976-4b59-98f5-9136676be7ff" containerName="registry-server"
Jan 21 18:49:52 crc kubenswrapper[4792]: I0121 18:49:52.844745 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-pzfzf"
Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-pzfzf" Jan 21 18:49:52 crc kubenswrapper[4792]: I0121 18:49:52.850311 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-pzfzf"] Jan 21 18:49:53 crc kubenswrapper[4792]: I0121 18:49:53.028631 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7lb6\" (UniqueName: \"kubernetes.io/projected/6fba917d-2dfc-4fa7-906b-7e4d38f6da46-kube-api-access-m7lb6\") pod \"service-telemetry-framework-operators-pzfzf\" (UID: \"6fba917d-2dfc-4fa7-906b-7e4d38f6da46\") " pod="service-telemetry/service-telemetry-framework-operators-pzfzf" Jan 21 18:49:53 crc kubenswrapper[4792]: I0121 18:49:53.130116 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7lb6\" (UniqueName: \"kubernetes.io/projected/6fba917d-2dfc-4fa7-906b-7e4d38f6da46-kube-api-access-m7lb6\") pod \"service-telemetry-framework-operators-pzfzf\" (UID: \"6fba917d-2dfc-4fa7-906b-7e4d38f6da46\") " pod="service-telemetry/service-telemetry-framework-operators-pzfzf" Jan 21 18:49:53 crc kubenswrapper[4792]: I0121 18:49:53.155294 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7lb6\" (UniqueName: \"kubernetes.io/projected/6fba917d-2dfc-4fa7-906b-7e4d38f6da46-kube-api-access-m7lb6\") pod \"service-telemetry-framework-operators-pzfzf\" (UID: \"6fba917d-2dfc-4fa7-906b-7e4d38f6da46\") " pod="service-telemetry/service-telemetry-framework-operators-pzfzf" Jan 21 18:49:53 crc kubenswrapper[4792]: I0121 18:49:53.169239 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-pzfzf" Jan 21 18:49:53 crc kubenswrapper[4792]: I0121 18:49:53.390315 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-pzfzf"] Jan 21 18:49:53 crc kubenswrapper[4792]: I0121 18:49:53.406633 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 18:49:54 crc kubenswrapper[4792]: I0121 18:49:54.304903 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-pzfzf" event={"ID":"6fba917d-2dfc-4fa7-906b-7e4d38f6da46","Type":"ContainerStarted","Data":"7c079cc575ebb404ba5e184d1b1bbb98019e44372ee415710f6dedb5eeb56ad3"} Jan 21 18:49:54 crc kubenswrapper[4792]: I0121 18:49:54.305307 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-pzfzf" event={"ID":"6fba917d-2dfc-4fa7-906b-7e4d38f6da46","Type":"ContainerStarted","Data":"9ea712e0a8c001792683419aafe1c16155f1a2505f940b839e56bdc43df669d0"} Jan 21 18:49:56 crc kubenswrapper[4792]: I0121 18:49:56.251736 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6" Jan 21 18:49:56 crc kubenswrapper[4792]: E0121 18:49:56.252419 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:50:03 crc 
kubenswrapper[4792]: I0121 18:50:03.170209 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/service-telemetry-framework-operators-pzfzf" Jan 21 18:50:03 crc kubenswrapper[4792]: I0121 18:50:03.170530 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/service-telemetry-framework-operators-pzfzf" Jan 21 18:50:03 crc kubenswrapper[4792]: I0121 18:50:03.205397 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/service-telemetry-framework-operators-pzfzf" Jan 21 18:50:03 crc kubenswrapper[4792]: I0121 18:50:03.225424 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-framework-operators-pzfzf" podStartSLOduration=11.108922704 podStartE2EDuration="11.225381078s" podCreationTimestamp="2026-01-21 18:49:52 +0000 UTC" firstStartedPulling="2026-01-21 18:49:53.406267446 +0000 UTC m=+3227.388230632" lastFinishedPulling="2026-01-21 18:49:53.52272582 +0000 UTC m=+3227.504689006" observedRunningTime="2026-01-21 18:49:54.331529076 +0000 UTC m=+3228.313492262" watchObservedRunningTime="2026-01-21 18:50:03.225381078 +0000 UTC m=+3237.207344264" Jan 21 18:50:03 crc kubenswrapper[4792]: I0121 18:50:03.400249 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/service-telemetry-framework-operators-pzfzf" Jan 21 18:50:03 crc kubenswrapper[4792]: I0121 18:50:03.446146 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-pzfzf"] Jan 21 18:50:05 crc kubenswrapper[4792]: I0121 18:50:05.407905 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/service-telemetry-framework-operators-pzfzf" podUID="6fba917d-2dfc-4fa7-906b-7e4d38f6da46" containerName="registry-server" containerID="cri-o://7c079cc575ebb404ba5e184d1b1bbb98019e44372ee415710f6dedb5eeb56ad3" gracePeriod=2 Jan 21 18:50:05 crc kubenswrapper[4792]: I0121 18:50:05.796587 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-pzfzf" Jan 21 18:50:05 crc kubenswrapper[4792]: I0121 18:50:05.962618 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m7lb6\" (UniqueName: \"kubernetes.io/projected/6fba917d-2dfc-4fa7-906b-7e4d38f6da46-kube-api-access-m7lb6\") pod \"6fba917d-2dfc-4fa7-906b-7e4d38f6da46\" (UID: \"6fba917d-2dfc-4fa7-906b-7e4d38f6da46\") " Jan 21 18:50:05 crc kubenswrapper[4792]: I0121 18:50:05.968458 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fba917d-2dfc-4fa7-906b-7e4d38f6da46-kube-api-access-m7lb6" (OuterVolumeSpecName: "kube-api-access-m7lb6") pod "6fba917d-2dfc-4fa7-906b-7e4d38f6da46" (UID: "6fba917d-2dfc-4fa7-906b-7e4d38f6da46"). InnerVolumeSpecName "kube-api-access-m7lb6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:50:06 crc kubenswrapper[4792]: I0121 18:50:06.065127 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m7lb6\" (UniqueName: \"kubernetes.io/projected/6fba917d-2dfc-4fa7-906b-7e4d38f6da46-kube-api-access-m7lb6\") on node \"crc\" DevicePath \"\"" Jan 21 18:50:06 crc kubenswrapper[4792]: I0121 18:50:06.418179 4792 generic.go:334] "Generic (PLEG): container finished" podID="6fba917d-2dfc-4fa7-906b-7e4d38f6da46" containerID="7c079cc575ebb404ba5e184d1b1bbb98019e44372ee415710f6dedb5eeb56ad3" exitCode=0 Jan 21 18:50:06 crc kubenswrapper[4792]: I0121 18:50:06.418226 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-pzfzf" event={"ID":"6fba917d-2dfc-4fa7-906b-7e4d38f6da46","Type":"ContainerDied","Data":"7c079cc575ebb404ba5e184d1b1bbb98019e44372ee415710f6dedb5eeb56ad3"} Jan 21 18:50:06 crc kubenswrapper[4792]: I0121 18:50:06.418245 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-pzfzf" Jan 21 18:50:06 crc kubenswrapper[4792]: I0121 18:50:06.418267 4792 scope.go:117] "RemoveContainer" containerID="7c079cc575ebb404ba5e184d1b1bbb98019e44372ee415710f6dedb5eeb56ad3" Jan 21 18:50:06 crc kubenswrapper[4792]: I0121 18:50:06.418255 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-pzfzf" event={"ID":"6fba917d-2dfc-4fa7-906b-7e4d38f6da46","Type":"ContainerDied","Data":"9ea712e0a8c001792683419aafe1c16155f1a2505f940b839e56bdc43df669d0"} Jan 21 18:50:06 crc kubenswrapper[4792]: I0121 18:50:06.442611 4792 scope.go:117] "RemoveContainer" containerID="7c079cc575ebb404ba5e184d1b1bbb98019e44372ee415710f6dedb5eeb56ad3" Jan 21 18:50:06 crc kubenswrapper[4792]: E0121 18:50:06.443121 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c079cc575ebb404ba5e184d1b1bbb98019e44372ee415710f6dedb5eeb56ad3\": container with ID starting with 7c079cc575ebb404ba5e184d1b1bbb98019e44372ee415710f6dedb5eeb56ad3 not found: ID does not exist" containerID="7c079cc575ebb404ba5e184d1b1bbb98019e44372ee415710f6dedb5eeb56ad3" Jan 21 18:50:06 crc kubenswrapper[4792]: I0121 18:50:06.443161 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c079cc575ebb404ba5e184d1b1bbb98019e44372ee415710f6dedb5eeb56ad3"} err="failed to get container status \"7c079cc575ebb404ba5e184d1b1bbb98019e44372ee415710f6dedb5eeb56ad3\": rpc error: code = NotFound desc = could not find container \"7c079cc575ebb404ba5e184d1b1bbb98019e44372ee415710f6dedb5eeb56ad3\": container with ID starting with 7c079cc575ebb404ba5e184d1b1bbb98019e44372ee415710f6dedb5eeb56ad3 not found: ID does not exist" Jan 21 18:50:06 crc kubenswrapper[4792]: I0121 18:50:06.443787 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-pzfzf"] Jan 21 18:50:06 crc kubenswrapper[4792]: I0121 18:50:06.449236 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-pzfzf"] Jan 21 18:50:07 crc kubenswrapper[4792]: E0121 18:50:07.724131 4792 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fba917d_2dfc_4fa7_906b_7e4d38f6da46.slice/crio-9ea712e0a8c001792683419aafe1c16155f1a2505f940b839e56bdc43df669d0\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fba917d_2dfc_4fa7_906b_7e4d38f6da46.slice\": RecentStats: unable to find data in memory cache]" Jan 21 18:50:08 crc kubenswrapper[4792]: I0121 18:50:08.252928 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6" Jan 21 18:50:08 crc kubenswrapper[4792]: E0121 18:50:08.253143 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:50:08 crc kubenswrapper[4792]: I0121 18:50:08.268764 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fba917d-2dfc-4fa7-906b-7e4d38f6da46" path="/var/lib/kubelet/pods/6fba917d-2dfc-4fa7-906b-7e4d38f6da46/volumes" Jan 21 18:50:17 crc kubenswrapper[4792]: E0121 18:50:17.896339 4792 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fba917d_2dfc_4fa7_906b_7e4d38f6da46.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fba917d_2dfc_4fa7_906b_7e4d38f6da46.slice/crio-9ea712e0a8c001792683419aafe1c16155f1a2505f940b839e56bdc43df669d0\": RecentStats: unable to find data in memory cache]" Jan 21 18:50:22 crc kubenswrapper[4792]: I0121 18:50:22.246923 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6" Jan 21 18:50:22 crc kubenswrapper[4792]: E0121 18:50:22.247280 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:50:28 crc kubenswrapper[4792]: E0121 18:50:28.115226 4792 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fba917d_2dfc_4fa7_906b_7e4d38f6da46.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fba917d_2dfc_4fa7_906b_7e4d38f6da46.slice/crio-9ea712e0a8c001792683419aafe1c16155f1a2505f940b839e56bdc43df669d0\": RecentStats: unable to find data in memory cache]" Jan 21 18:50:37 crc kubenswrapper[4792]: I0121 18:50:37.247134 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6" Jan 21 18:50:37 crc kubenswrapper[4792]: E0121 18:50:37.247898 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:50:38 crc kubenswrapper[4792]: E0121 18:50:38.313120 4792 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fba917d_2dfc_4fa7_906b_7e4d38f6da46.slice/crio-9ea712e0a8c001792683419aafe1c16155f1a2505f940b839e56bdc43df669d0\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fba917d_2dfc_4fa7_906b_7e4d38f6da46.slice\": RecentStats: unable to find data in memory cache]" Jan 21 18:50:48 crc kubenswrapper[4792]: E0121 18:50:48.487404 4792 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fba917d_2dfc_4fa7_906b_7e4d38f6da46.slice/crio-9ea712e0a8c001792683419aafe1c16155f1a2505f940b839e56bdc43df669d0\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fba917d_2dfc_4fa7_906b_7e4d38f6da46.slice\": RecentStats: unable to find data in memory cache]" Jan 21 18:50:51 crc kubenswrapper[4792]: I0121 18:50:51.247253 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6" Jan 21 18:50:51 crc kubenswrapper[4792]: E0121 18:50:51.247889 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:50:58 crc kubenswrapper[4792]: E0121 18:50:58.644089 4792 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fba917d_2dfc_4fa7_906b_7e4d38f6da46.slice/crio-9ea712e0a8c001792683419aafe1c16155f1a2505f940b839e56bdc43df669d0\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fba917d_2dfc_4fa7_906b_7e4d38f6da46.slice\": RecentStats: unable to find data in memory cache]" Jan 21 18:51:02 crc kubenswrapper[4792]: I0121 18:51:02.248231 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6" Jan 21 18:51:02 crc kubenswrapper[4792]: I0121 18:51:02.909306 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerStarted","Data":"2ae5031e31af34a83cc266d378bcb929d6894824e12d0af9fda531402379b4da"} Jan 21 18:51:06 crc kubenswrapper[4792]: E0121 18:51:06.283404 4792 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/e5273c3832942c830baaf28ec53557e967d2a01e718c0e610e96a9e6a0113de9/diff" to get inode usage: stat /var/lib/containers/storage/overlay/e5273c3832942c830baaf28ec53557e967d2a01e718c0e610e96a9e6a0113de9/diff: no such 
file or directory, extraDiskErr: Jan 21 18:53:23 crc kubenswrapper[4792]: I0121 18:53:23.570592 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:53:23 crc kubenswrapper[4792]: I0121 18:53:23.571191 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:53:43 crc kubenswrapper[4792]: I0121 18:53:43.830730 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-k6dsg"] Jan 21 18:53:43 crc kubenswrapper[4792]: E0121 18:53:43.832922 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fba917d-2dfc-4fa7-906b-7e4d38f6da46" containerName="registry-server" Jan 21 18:53:43 crc kubenswrapper[4792]: I0121 18:53:43.833056 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fba917d-2dfc-4fa7-906b-7e4d38f6da46" containerName="registry-server" Jan 21 18:53:43 crc kubenswrapper[4792]: I0121 18:53:43.833258 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fba917d-2dfc-4fa7-906b-7e4d38f6da46" containerName="registry-server" Jan 21 18:53:43 crc kubenswrapper[4792]: I0121 18:53:43.847248 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k6dsg" Jan 21 18:53:43 crc kubenswrapper[4792]: I0121 18:53:43.851869 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-k6dsg"] Jan 21 18:53:44 crc kubenswrapper[4792]: I0121 18:53:44.005054 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9hls\" (UniqueName: \"kubernetes.io/projected/b3ffb09e-da92-49e5-b4b3-9a22324bc8de-kube-api-access-w9hls\") pod \"redhat-operators-k6dsg\" (UID: \"b3ffb09e-da92-49e5-b4b3-9a22324bc8de\") " pod="openshift-marketplace/redhat-operators-k6dsg" Jan 21 18:53:44 crc kubenswrapper[4792]: I0121 18:53:44.005513 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3ffb09e-da92-49e5-b4b3-9a22324bc8de-catalog-content\") pod \"redhat-operators-k6dsg\" (UID: \"b3ffb09e-da92-49e5-b4b3-9a22324bc8de\") " pod="openshift-marketplace/redhat-operators-k6dsg" Jan 21 18:53:44 crc kubenswrapper[4792]: I0121 18:53:44.005622 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3ffb09e-da92-49e5-b4b3-9a22324bc8de-utilities\") pod \"redhat-operators-k6dsg\" (UID: \"b3ffb09e-da92-49e5-b4b3-9a22324bc8de\") " pod="openshift-marketplace/redhat-operators-k6dsg" Jan 21 18:53:44 crc kubenswrapper[4792]: I0121 18:53:44.106314 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3ffb09e-da92-49e5-b4b3-9a22324bc8de-catalog-content\") pod \"redhat-operators-k6dsg\" (UID: \"b3ffb09e-da92-49e5-b4b3-9a22324bc8de\") " pod="openshift-marketplace/redhat-operators-k6dsg" Jan 21 18:53:44 crc 
kubenswrapper[4792]: I0121 18:53:44.106760 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3ffb09e-da92-49e5-b4b3-9a22324bc8de-utilities\") pod \"redhat-operators-k6dsg\" (UID: \"b3ffb09e-da92-49e5-b4b3-9a22324bc8de\") " pod="openshift-marketplace/redhat-operators-k6dsg" Jan 21 18:53:44 crc kubenswrapper[4792]: I0121 18:53:44.106941 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9hls\" (UniqueName: \"kubernetes.io/projected/b3ffb09e-da92-49e5-b4b3-9a22324bc8de-kube-api-access-w9hls\") pod \"redhat-operators-k6dsg\" (UID: \"b3ffb09e-da92-49e5-b4b3-9a22324bc8de\") " pod="openshift-marketplace/redhat-operators-k6dsg" Jan 21 18:53:44 crc kubenswrapper[4792]: I0121 18:53:44.107151 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3ffb09e-da92-49e5-b4b3-9a22324bc8de-catalog-content\") pod \"redhat-operators-k6dsg\" (UID: \"b3ffb09e-da92-49e5-b4b3-9a22324bc8de\") " pod="openshift-marketplace/redhat-operators-k6dsg" Jan 21 18:53:44 crc kubenswrapper[4792]: I0121 18:53:44.108194 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3ffb09e-da92-49e5-b4b3-9a22324bc8de-utilities\") pod \"redhat-operators-k6dsg\" (UID: \"b3ffb09e-da92-49e5-b4b3-9a22324bc8de\") " pod="openshift-marketplace/redhat-operators-k6dsg" Jan 21 18:53:44 crc kubenswrapper[4792]: I0121 18:53:44.142350 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9hls\" (UniqueName: \"kubernetes.io/projected/b3ffb09e-da92-49e5-b4b3-9a22324bc8de-kube-api-access-w9hls\") pod \"redhat-operators-k6dsg\" (UID: \"b3ffb09e-da92-49e5-b4b3-9a22324bc8de\") " pod="openshift-marketplace/redhat-operators-k6dsg" Jan 21 18:53:44 crc kubenswrapper[4792]: I0121 18:53:44.171163 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-k6dsg" Jan 21 18:53:44 crc kubenswrapper[4792]: I0121 18:53:44.414461 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-k6dsg"] Jan 21 18:53:45 crc kubenswrapper[4792]: I0121 18:53:45.205489 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k6dsg" event={"ID":"b3ffb09e-da92-49e5-b4b3-9a22324bc8de","Type":"ContainerDied","Data":"a2e14ffbc8ffe87504637c01e7ad5dfdd07afd593be9eb68b7607f93db0ce712"} Jan 21 18:53:45 crc kubenswrapper[4792]: I0121 18:53:45.205288 4792 generic.go:334] "Generic (PLEG): container finished" podID="b3ffb09e-da92-49e5-b4b3-9a22324bc8de" containerID="a2e14ffbc8ffe87504637c01e7ad5dfdd07afd593be9eb68b7607f93db0ce712" exitCode=0 Jan 21 18:53:45 crc kubenswrapper[4792]: I0121 18:53:45.206528 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k6dsg" event={"ID":"b3ffb09e-da92-49e5-b4b3-9a22324bc8de","Type":"ContainerStarted","Data":"9b282b530575327f15904ad50b9e8c382f419d4632b939a8e9552f097874a995"} Jan 21 18:53:46 crc kubenswrapper[4792]: I0121 18:53:46.227235 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k6dsg" event={"ID":"b3ffb09e-da92-49e5-b4b3-9a22324bc8de","Type":"ContainerStarted","Data":"c13af7f81ddd045e273dfef44f65bea923c66b8a64653f01c1770e6e5e52d8dd"} Jan 21 18:53:47 crc kubenswrapper[4792]: I0121 18:53:47.236376 4792 generic.go:334] "Generic (PLEG): container finished" podID="b3ffb09e-da92-49e5-b4b3-9a22324bc8de" containerID="c13af7f81ddd045e273dfef44f65bea923c66b8a64653f01c1770e6e5e52d8dd" exitCode=0 Jan 21 18:53:47 crc kubenswrapper[4792]: I0121 18:53:47.236532 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k6dsg" event={"ID":"b3ffb09e-da92-49e5-b4b3-9a22324bc8de","Type":"ContainerDied","Data":"c13af7f81ddd045e273dfef44f65bea923c66b8a64653f01c1770e6e5e52d8dd"} Jan 21 18:53:48 crc kubenswrapper[4792]: I0121 18:53:48.254712 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k6dsg" event={"ID":"b3ffb09e-da92-49e5-b4b3-9a22324bc8de","Type":"ContainerStarted","Data":"d2169d475b946fa6c4878d071fd248b09f67f8dfeac39fd8dd58705e4ccf9e1b"} Jan 21 18:53:48 crc kubenswrapper[4792]: I0121 18:53:48.279812 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-k6dsg" podStartSLOduration=2.6428134009999997 podStartE2EDuration="5.279793453s" podCreationTimestamp="2026-01-21 18:53:43 +0000 UTC" firstStartedPulling="2026-01-21 18:53:45.207565197 +0000 UTC m=+3459.189528383" lastFinishedPulling="2026-01-21 18:53:47.844545249 +0000 UTC m=+3461.826508435" observedRunningTime="2026-01-21 18:53:48.275587188 +0000 UTC m=+3462.257550384" watchObservedRunningTime="2026-01-21 18:53:48.279793453 +0000 UTC m=+3462.261756639" Jan 21 18:53:53 crc kubenswrapper[4792]: I0121 18:53:53.570875 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:53:53 crc kubenswrapper[4792]: I0121 18:53:53.571806 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" 
podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:53:54 crc kubenswrapper[4792]: I0121 18:53:54.171584 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-k6dsg" Jan 21 18:53:54 crc kubenswrapper[4792]: I0121 18:53:54.171691 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-k6dsg" Jan 21 18:53:54 crc kubenswrapper[4792]: I0121 18:53:54.257254 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-k6dsg" Jan 21 18:53:54 crc kubenswrapper[4792]: I0121 18:53:54.339599 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-k6dsg" Jan 21 18:53:54 crc kubenswrapper[4792]: I0121 18:53:54.508633 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-k6dsg"] Jan 21 18:53:56 crc kubenswrapper[4792]: I0121 18:53:56.336815 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-k6dsg" podUID="b3ffb09e-da92-49e5-b4b3-9a22324bc8de" containerName="registry-server" containerID="cri-o://d2169d475b946fa6c4878d071fd248b09f67f8dfeac39fd8dd58705e4ccf9e1b" gracePeriod=2 Jan 21 18:53:59 crc kubenswrapper[4792]: I0121 18:53:59.363052 4792 generic.go:334] "Generic (PLEG): container finished" podID="b3ffb09e-da92-49e5-b4b3-9a22324bc8de" containerID="d2169d475b946fa6c4878d071fd248b09f67f8dfeac39fd8dd58705e4ccf9e1b" exitCode=0 Jan 21 18:53:59 crc kubenswrapper[4792]: I0121 18:53:59.363155 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k6dsg" event={"ID":"b3ffb09e-da92-49e5-b4b3-9a22324bc8de","Type":"ContainerDied","Data":"d2169d475b946fa6c4878d071fd248b09f67f8dfeac39fd8dd58705e4ccf9e1b"} Jan 21 18:53:59 crc kubenswrapper[4792]: I0121 18:53:59.461554 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-k6dsg" Jan 21 18:53:59 crc kubenswrapper[4792]: I0121 18:53:59.659834 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9hls\" (UniqueName: \"kubernetes.io/projected/b3ffb09e-da92-49e5-b4b3-9a22324bc8de-kube-api-access-w9hls\") pod \"b3ffb09e-da92-49e5-b4b3-9a22324bc8de\" (UID: \"b3ffb09e-da92-49e5-b4b3-9a22324bc8de\") " Jan 21 18:53:59 crc kubenswrapper[4792]: I0121 18:53:59.661211 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3ffb09e-da92-49e5-b4b3-9a22324bc8de-utilities\") pod \"b3ffb09e-da92-49e5-b4b3-9a22324bc8de\" (UID: \"b3ffb09e-da92-49e5-b4b3-9a22324bc8de\") " Jan 21 18:53:59 crc kubenswrapper[4792]: I0121 18:53:59.661381 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3ffb09e-da92-49e5-b4b3-9a22324bc8de-catalog-content\") pod \"b3ffb09e-da92-49e5-b4b3-9a22324bc8de\" (UID: \"b3ffb09e-da92-49e5-b4b3-9a22324bc8de\") " Jan 21 18:53:59 crc kubenswrapper[4792]: I0121 18:53:59.662465 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3ffb09e-da92-49e5-b4b3-9a22324bc8de-utilities" (OuterVolumeSpecName: "utilities") pod "b3ffb09e-da92-49e5-b4b3-9a22324bc8de" (UID: "b3ffb09e-da92-49e5-b4b3-9a22324bc8de"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:53:59 crc kubenswrapper[4792]: I0121 18:53:59.662802 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3ffb09e-da92-49e5-b4b3-9a22324bc8de-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:53:59 crc kubenswrapper[4792]: I0121 18:53:59.668391 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3ffb09e-da92-49e5-b4b3-9a22324bc8de-kube-api-access-w9hls" (OuterVolumeSpecName: "kube-api-access-w9hls") pod "b3ffb09e-da92-49e5-b4b3-9a22324bc8de" (UID: "b3ffb09e-da92-49e5-b4b3-9a22324bc8de"). InnerVolumeSpecName "kube-api-access-w9hls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:53:59 crc kubenswrapper[4792]: I0121 18:53:59.765609 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9hls\" (UniqueName: \"kubernetes.io/projected/b3ffb09e-da92-49e5-b4b3-9a22324bc8de-kube-api-access-w9hls\") on node \"crc\" DevicePath \"\"" Jan 21 18:53:59 crc kubenswrapper[4792]: I0121 18:53:59.822160 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3ffb09e-da92-49e5-b4b3-9a22324bc8de-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b3ffb09e-da92-49e5-b4b3-9a22324bc8de" (UID: "b3ffb09e-da92-49e5-b4b3-9a22324bc8de"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:53:59 crc kubenswrapper[4792]: I0121 18:53:59.867273 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3ffb09e-da92-49e5-b4b3-9a22324bc8de-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:54:00 crc kubenswrapper[4792]: I0121 18:54:00.373554 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k6dsg" event={"ID":"b3ffb09e-da92-49e5-b4b3-9a22324bc8de","Type":"ContainerDied","Data":"9b282b530575327f15904ad50b9e8c382f419d4632b939a8e9552f097874a995"} Jan 21 18:54:00 crc kubenswrapper[4792]: I0121 18:54:00.374081 4792 scope.go:117] "RemoveContainer" containerID="d2169d475b946fa6c4878d071fd248b09f67f8dfeac39fd8dd58705e4ccf9e1b" Jan 21 18:54:00 crc kubenswrapper[4792]: I0121 18:54:00.373613 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k6dsg" Jan 21 18:54:00 crc kubenswrapper[4792]: I0121 18:54:00.406883 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-k6dsg"] Jan 21 18:54:00 crc kubenswrapper[4792]: I0121 18:54:00.407224 4792 scope.go:117] "RemoveContainer" containerID="c13af7f81ddd045e273dfef44f65bea923c66b8a64653f01c1770e6e5e52d8dd" Jan 21 18:54:00 crc kubenswrapper[4792]: I0121 18:54:00.415220 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-k6dsg"] Jan 21 18:54:00 crc kubenswrapper[4792]: I0121 18:54:00.427339 4792 scope.go:117] "RemoveContainer" containerID="a2e14ffbc8ffe87504637c01e7ad5dfdd07afd593be9eb68b7607f93db0ce712" Jan 21 18:54:02 crc kubenswrapper[4792]: I0121 18:54:02.258452 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3ffb09e-da92-49e5-b4b3-9a22324bc8de" path="/var/lib/kubelet/pods/b3ffb09e-da92-49e5-b4b3-9a22324bc8de/volumes" Jan 21 18:54:23 crc kubenswrapper[4792]: I0121 18:54:23.570359 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:54:23 crc kubenswrapper[4792]: I0121 18:54:23.571197 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:54:23 crc kubenswrapper[4792]: I0121 18:54:23.571266 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" Jan 21 18:54:23 crc kubenswrapper[4792]: I0121 18:54:23.572150 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2ae5031e31af34a83cc266d378bcb929d6894824e12d0af9fda531402379b4da"} pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 18:54:23 crc kubenswrapper[4792]: I0121 18:54:23.572201 4792 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" containerID="cri-o://2ae5031e31af34a83cc266d378bcb929d6894824e12d0af9fda531402379b4da" gracePeriod=600 Jan 21 18:54:24 crc kubenswrapper[4792]: I0121 18:54:24.585946 4792 generic.go:334] "Generic (PLEG): container finished" podID="759f2e21-e44e-4049-b262-cb49448e22ab" containerID="2ae5031e31af34a83cc266d378bcb929d6894824e12d0af9fda531402379b4da" exitCode=0 Jan 21 18:54:24 crc kubenswrapper[4792]: I0121 18:54:24.585990 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerDied","Data":"2ae5031e31af34a83cc266d378bcb929d6894824e12d0af9fda531402379b4da"} Jan 21 18:54:24 crc kubenswrapper[4792]: I0121 18:54:24.586326 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerStarted","Data":"87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505"} Jan 21 18:54:24 crc kubenswrapper[4792]: I0121 18:54:24.586347 4792 scope.go:117] "RemoveContainer" containerID="8759cff326b1cb0645852a854f68994affffda3477c6b5c3c3e0b7f8b72b45d6" Jan 21 18:55:26 crc kubenswrapper[4792]: I0121 18:55:26.546618 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-h28pm"] Jan 21 18:55:26 crc kubenswrapper[4792]: E0121 18:55:26.547619 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3ffb09e-da92-49e5-b4b3-9a22324bc8de" containerName="extract-content" Jan 21 18:55:26 crc kubenswrapper[4792]: I0121 18:55:26.547632 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3ffb09e-da92-49e5-b4b3-9a22324bc8de" containerName="extract-content" Jan 21 18:55:26 crc kubenswrapper[4792]: E0121 18:55:26.547656 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3ffb09e-da92-49e5-b4b3-9a22324bc8de" containerName="registry-server" Jan 21 18:55:26 crc kubenswrapper[4792]: I0121 18:55:26.547663 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3ffb09e-da92-49e5-b4b3-9a22324bc8de" containerName="registry-server" Jan 21 18:55:26 crc kubenswrapper[4792]: E0121 18:55:26.547685 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3ffb09e-da92-49e5-b4b3-9a22324bc8de" containerName="extract-utilities" Jan 21 18:55:26 crc kubenswrapper[4792]: I0121 18:55:26.547692 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3ffb09e-da92-49e5-b4b3-9a22324bc8de" containerName="extract-utilities" Jan 21 18:55:26 crc kubenswrapper[4792]: I0121 18:55:26.548000 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3ffb09e-da92-49e5-b4b3-9a22324bc8de" containerName="registry-server" Jan 21 18:55:26 crc kubenswrapper[4792]: I0121 18:55:26.550173 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-h28pm" Jan 21 18:55:26 crc kubenswrapper[4792]: I0121 18:55:26.563223 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h28pm"] Jan 21 18:55:26 crc kubenswrapper[4792]: I0121 18:55:26.598162 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/386d0c5b-e1e5-4c77-8c28-f064a7585e20-utilities\") pod \"community-operators-h28pm\" (UID: \"386d0c5b-e1e5-4c77-8c28-f064a7585e20\") " pod="openshift-marketplace/community-operators-h28pm" Jan 21 18:55:26 crc kubenswrapper[4792]: I0121 18:55:26.598235 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/386d0c5b-e1e5-4c77-8c28-f064a7585e20-catalog-content\") pod \"community-operators-h28pm\" (UID: \"386d0c5b-e1e5-4c77-8c28-f064a7585e20\") " pod="openshift-marketplace/community-operators-h28pm" Jan 21 18:55:26 crc kubenswrapper[4792]: I0121 18:55:26.598325 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49xjv\" (UniqueName: \"kubernetes.io/projected/386d0c5b-e1e5-4c77-8c28-f064a7585e20-kube-api-access-49xjv\") pod \"community-operators-h28pm\" (UID: \"386d0c5b-e1e5-4c77-8c28-f064a7585e20\") " pod="openshift-marketplace/community-operators-h28pm" Jan 21 18:55:26 crc kubenswrapper[4792]: I0121 18:55:26.700311 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49xjv\" (UniqueName: \"kubernetes.io/projected/386d0c5b-e1e5-4c77-8c28-f064a7585e20-kube-api-access-49xjv\") pod \"community-operators-h28pm\" (UID: \"386d0c5b-e1e5-4c77-8c28-f064a7585e20\") " pod="openshift-marketplace/community-operators-h28pm" Jan 21 18:55:26 crc kubenswrapper[4792]: I0121 18:55:26.700487 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/386d0c5b-e1e5-4c77-8c28-f064a7585e20-utilities\") pod \"community-operators-h28pm\" (UID: \"386d0c5b-e1e5-4c77-8c28-f064a7585e20\") " pod="openshift-marketplace/community-operators-h28pm" Jan 21 18:55:26 crc kubenswrapper[4792]: I0121 18:55:26.700508 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/386d0c5b-e1e5-4c77-8c28-f064a7585e20-catalog-content\") pod \"community-operators-h28pm\" (UID: \"386d0c5b-e1e5-4c77-8c28-f064a7585e20\") " pod="openshift-marketplace/community-operators-h28pm" Jan 21 18:55:26 crc kubenswrapper[4792]: I0121 18:55:26.701194 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/386d0c5b-e1e5-4c77-8c28-f064a7585e20-catalog-content\") pod \"community-operators-h28pm\" (UID: \"386d0c5b-e1e5-4c77-8c28-f064a7585e20\") " pod="openshift-marketplace/community-operators-h28pm" Jan 21 18:55:26 crc kubenswrapper[4792]: I0121 18:55:26.701238 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/386d0c5b-e1e5-4c77-8c28-f064a7585e20-utilities\") pod \"community-operators-h28pm\" (UID: \"386d0c5b-e1e5-4c77-8c28-f064a7585e20\") " pod="openshift-marketplace/community-operators-h28pm" Jan 21 18:55:26 crc kubenswrapper[4792]: I0121 18:55:26.737149 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-49xjv\" (UniqueName: \"kubernetes.io/projected/386d0c5b-e1e5-4c77-8c28-f064a7585e20-kube-api-access-49xjv\") pod \"community-operators-h28pm\" (UID: \"386d0c5b-e1e5-4c77-8c28-f064a7585e20\") " pod="openshift-marketplace/community-operators-h28pm" Jan 21 18:55:26 crc kubenswrapper[4792]: I0121 18:55:26.883259 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h28pm" Jan 21 18:55:27 crc kubenswrapper[4792]: I0121 18:55:27.532230 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h28pm"] Jan 21 18:55:28 crc kubenswrapper[4792]: I0121 18:55:28.120187 4792 generic.go:334] "Generic (PLEG): container finished" podID="386d0c5b-e1e5-4c77-8c28-f064a7585e20" containerID="18bedf16aed1962af2a26d4d32ed3572a5c503f2b55239ec96b65c844b089441" exitCode=0 Jan 21 18:55:28 crc kubenswrapper[4792]: I0121 18:55:28.120281 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h28pm" event={"ID":"386d0c5b-e1e5-4c77-8c28-f064a7585e20","Type":"ContainerDied","Data":"18bedf16aed1962af2a26d4d32ed3572a5c503f2b55239ec96b65c844b089441"} Jan 21 18:55:28 crc kubenswrapper[4792]: I0121 18:55:28.120653 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h28pm" event={"ID":"386d0c5b-e1e5-4c77-8c28-f064a7585e20","Type":"ContainerStarted","Data":"134c4cb9af8bd37ecf952a5b10ce0def3f83b93380e755aa1c46cb35c5efa8e1"} Jan 21 18:55:28 crc kubenswrapper[4792]: I0121 18:55:28.123058 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 18:55:29 crc kubenswrapper[4792]: I0121 18:55:29.130044 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h28pm" event={"ID":"386d0c5b-e1e5-4c77-8c28-f064a7585e20","Type":"ContainerStarted","Data":"ceff8563ab8e13fe2d563693b8224faf4e967d4845279961bbc1a4238bfc1975"} Jan 21 18:55:30 crc kubenswrapper[4792]: I0121 18:55:30.142029 4792 generic.go:334] "Generic (PLEG): container finished" podID="386d0c5b-e1e5-4c77-8c28-f064a7585e20" containerID="ceff8563ab8e13fe2d563693b8224faf4e967d4845279961bbc1a4238bfc1975" exitCode=0 Jan 21 18:55:30 crc kubenswrapper[4792]: I0121 18:55:30.142096 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h28pm" event={"ID":"386d0c5b-e1e5-4c77-8c28-f064a7585e20","Type":"ContainerDied","Data":"ceff8563ab8e13fe2d563693b8224faf4e967d4845279961bbc1a4238bfc1975"} Jan 21 18:55:30 crc kubenswrapper[4792]: I0121 18:55:30.524300 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-framework-operators-x4lxj"] Jan 21 18:55:30 crc kubenswrapper[4792]: I0121 18:55:30.526497 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-x4lxj" Jan 21 18:55:30 crc kubenswrapper[4792]: I0121 18:55:30.563330 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-x4lxj"] Jan 21 18:55:30 crc kubenswrapper[4792]: I0121 18:55:30.650980 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxrt8\" (UniqueName: \"kubernetes.io/projected/7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d-kube-api-access-bxrt8\") pod \"service-telemetry-framework-operators-x4lxj\" (UID: \"7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d\") " pod="service-telemetry/service-telemetry-framework-operators-x4lxj" Jan 21 18:55:30 crc kubenswrapper[4792]: I0121 18:55:30.753126 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxrt8\" (UniqueName: \"kubernetes.io/projected/7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d-kube-api-access-bxrt8\") pod \"service-telemetry-framework-operators-x4lxj\" (UID: \"7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d\") " pod="service-telemetry/service-telemetry-framework-operators-x4lxj" Jan 21 18:55:30 crc kubenswrapper[4792]: I0121 18:55:30.783641 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxrt8\" (UniqueName: \"kubernetes.io/projected/7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d-kube-api-access-bxrt8\") pod \"service-telemetry-framework-operators-x4lxj\" (UID: \"7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d\") " pod="service-telemetry/service-telemetry-framework-operators-x4lxj" Jan 21 18:55:30 crc kubenswrapper[4792]: I0121 18:55:30.860073 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-x4lxj" Jan 21 18:55:31 crc kubenswrapper[4792]: I0121 18:55:31.092668 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-x4lxj"] Jan 21 18:55:31 crc kubenswrapper[4792]: W0121 18:55:31.103026 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7b65c664_2f16_4ef2_b1a1_73e3f0ffc33d.slice/crio-25fce65d0a4fe77245a3e28950651ed31847e0f9a93f50e656bac87f41a02104 WatchSource:0}: Error finding container 25fce65d0a4fe77245a3e28950651ed31847e0f9a93f50e656bac87f41a02104: Status 404 returned error can't find the container with id 25fce65d0a4fe77245a3e28950651ed31847e0f9a93f50e656bac87f41a02104 Jan 21 18:55:31 crc kubenswrapper[4792]: I0121 18:55:31.153213 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h28pm" event={"ID":"386d0c5b-e1e5-4c77-8c28-f064a7585e20","Type":"ContainerStarted","Data":"da6b7987c6d3a5ed0e0d804db53153e4f49907e33c7058798f5915692f35f81c"} Jan 21 18:55:31 crc kubenswrapper[4792]: I0121 18:55:31.156019 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-x4lxj" event={"ID":"7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d","Type":"ContainerStarted","Data":"25fce65d0a4fe77245a3e28950651ed31847e0f9a93f50e656bac87f41a02104"} Jan 21 18:55:31 crc kubenswrapper[4792]: I0121 18:55:31.177616 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-h28pm" podStartSLOduration=2.501136635 podStartE2EDuration="5.177598064s" podCreationTimestamp="2026-01-21 18:55:26 +0000 UTC" firstStartedPulling="2026-01-21 
18:55:28.122774301 +0000 UTC m=+3562.104737487" lastFinishedPulling="2026-01-21 18:55:30.79923573 +0000 UTC m=+3564.781198916" observedRunningTime="2026-01-21 18:55:31.173667879 +0000 UTC m=+3565.155631085" watchObservedRunningTime="2026-01-21 18:55:31.177598064 +0000 UTC m=+3565.159561240" Jan 21 18:55:32 crc kubenswrapper[4792]: I0121 18:55:32.166119 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-x4lxj" event={"ID":"7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d","Type":"ContainerStarted","Data":"1724025b75d06497e8d0d87c5afffee2ec2874f390cfc631505993ec442e7867"} Jan 21 18:55:32 crc kubenswrapper[4792]: I0121 18:55:32.188911 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-framework-operators-x4lxj" podStartSLOduration=2.060671504 podStartE2EDuration="2.188885241s" podCreationTimestamp="2026-01-21 18:55:30 +0000 UTC" firstStartedPulling="2026-01-21 18:55:31.105423203 +0000 UTC m=+3565.087386389" lastFinishedPulling="2026-01-21 18:55:31.23363694 +0000 UTC m=+3565.215600126" observedRunningTime="2026-01-21 18:55:32.186337372 +0000 UTC m=+3566.168300558" watchObservedRunningTime="2026-01-21 18:55:32.188885241 +0000 UTC m=+3566.170848427" Jan 21 18:55:36 crc kubenswrapper[4792]: I0121 18:55:36.883548 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-h28pm" Jan 21 18:55:36 crc kubenswrapper[4792]: I0121 18:55:36.884102 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-h28pm" Jan 21 18:55:37 crc kubenswrapper[4792]: I0121 18:55:37.008478 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-h28pm" Jan 21 18:55:37 crc kubenswrapper[4792]: I0121 18:55:37.240217 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-h28pm" Jan 21 18:55:39 crc kubenswrapper[4792]: I0121 18:55:39.509294 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h28pm"] Jan 21 18:55:39 crc kubenswrapper[4792]: I0121 18:55:39.509811 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-h28pm" podUID="386d0c5b-e1e5-4c77-8c28-f064a7585e20" containerName="registry-server" containerID="cri-o://da6b7987c6d3a5ed0e0d804db53153e4f49907e33c7058798f5915692f35f81c" gracePeriod=2 Jan 21 18:55:40 crc kubenswrapper[4792]: I0121 18:55:40.860893 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/service-telemetry-framework-operators-x4lxj" Jan 21 18:55:40 crc kubenswrapper[4792]: I0121 18:55:40.861168 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/service-telemetry-framework-operators-x4lxj" Jan 21 18:55:40 crc kubenswrapper[4792]: I0121 18:55:40.904432 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/service-telemetry-framework-operators-x4lxj" Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.004567 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-h28pm" Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.152557 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/386d0c5b-e1e5-4c77-8c28-f064a7585e20-utilities\") pod \"386d0c5b-e1e5-4c77-8c28-f064a7585e20\" (UID: \"386d0c5b-e1e5-4c77-8c28-f064a7585e20\") " Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.153586 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/386d0c5b-e1e5-4c77-8c28-f064a7585e20-utilities" (OuterVolumeSpecName: "utilities") pod "386d0c5b-e1e5-4c77-8c28-f064a7585e20" (UID: "386d0c5b-e1e5-4c77-8c28-f064a7585e20"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.153693 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/386d0c5b-e1e5-4c77-8c28-f064a7585e20-catalog-content\") pod \"386d0c5b-e1e5-4c77-8c28-f064a7585e20\" (UID: \"386d0c5b-e1e5-4c77-8c28-f064a7585e20\") " Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.167512 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-49xjv\" (UniqueName: \"kubernetes.io/projected/386d0c5b-e1e5-4c77-8c28-f064a7585e20-kube-api-access-49xjv\") pod \"386d0c5b-e1e5-4c77-8c28-f064a7585e20\" (UID: \"386d0c5b-e1e5-4c77-8c28-f064a7585e20\") " Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.167984 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/386d0c5b-e1e5-4c77-8c28-f064a7585e20-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.174129 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/386d0c5b-e1e5-4c77-8c28-f064a7585e20-kube-api-access-49xjv" (OuterVolumeSpecName: "kube-api-access-49xjv") pod "386d0c5b-e1e5-4c77-8c28-f064a7585e20" (UID: "386d0c5b-e1e5-4c77-8c28-f064a7585e20"). InnerVolumeSpecName "kube-api-access-49xjv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.223233 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/386d0c5b-e1e5-4c77-8c28-f064a7585e20-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "386d0c5b-e1e5-4c77-8c28-f064a7585e20" (UID: "386d0c5b-e1e5-4c77-8c28-f064a7585e20"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.235522 4792 generic.go:334] "Generic (PLEG): container finished" podID="386d0c5b-e1e5-4c77-8c28-f064a7585e20" containerID="da6b7987c6d3a5ed0e0d804db53153e4f49907e33c7058798f5915692f35f81c" exitCode=0 Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.235640 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-h28pm" Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.235641 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h28pm" event={"ID":"386d0c5b-e1e5-4c77-8c28-f064a7585e20","Type":"ContainerDied","Data":"da6b7987c6d3a5ed0e0d804db53153e4f49907e33c7058798f5915692f35f81c"} Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.235738 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h28pm" event={"ID":"386d0c5b-e1e5-4c77-8c28-f064a7585e20","Type":"ContainerDied","Data":"134c4cb9af8bd37ecf952a5b10ce0def3f83b93380e755aa1c46cb35c5efa8e1"} Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.235773 4792 scope.go:117] "RemoveContainer" containerID="da6b7987c6d3a5ed0e0d804db53153e4f49907e33c7058798f5915692f35f81c" Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.263387 4792 scope.go:117] "RemoveContainer" containerID="ceff8563ab8e13fe2d563693b8224faf4e967d4845279961bbc1a4238bfc1975" Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.271254 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/386d0c5b-e1e5-4c77-8c28-f064a7585e20-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.271290 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-49xjv\" (UniqueName: \"kubernetes.io/projected/386d0c5b-e1e5-4c77-8c28-f064a7585e20-kube-api-access-49xjv\") on node \"crc\" DevicePath \"\"" Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.276727 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/service-telemetry-framework-operators-x4lxj" Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.280227 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h28pm"] Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.285886 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-h28pm"] Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.294376 4792 scope.go:117] "RemoveContainer" containerID="18bedf16aed1962af2a26d4d32ed3572a5c503f2b55239ec96b65c844b089441" Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.318482 4792 scope.go:117] "RemoveContainer" containerID="da6b7987c6d3a5ed0e0d804db53153e4f49907e33c7058798f5915692f35f81c" Jan 21 18:55:41 crc kubenswrapper[4792]: E0121 18:55:41.319286 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da6b7987c6d3a5ed0e0d804db53153e4f49907e33c7058798f5915692f35f81c\": container with ID starting with da6b7987c6d3a5ed0e0d804db53153e4f49907e33c7058798f5915692f35f81c not found: ID does not exist" containerID="da6b7987c6d3a5ed0e0d804db53153e4f49907e33c7058798f5915692f35f81c" Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.319327 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da6b7987c6d3a5ed0e0d804db53153e4f49907e33c7058798f5915692f35f81c"} err="failed to get container status \"da6b7987c6d3a5ed0e0d804db53153e4f49907e33c7058798f5915692f35f81c\": rpc error: code = NotFound desc = could not find container \"da6b7987c6d3a5ed0e0d804db53153e4f49907e33c7058798f5915692f35f81c\": container with ID starting with 
Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.319327 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da6b7987c6d3a5ed0e0d804db53153e4f49907e33c7058798f5915692f35f81c"} err="failed to get container status \"da6b7987c6d3a5ed0e0d804db53153e4f49907e33c7058798f5915692f35f81c\": rpc error: code = NotFound desc = could not find container \"da6b7987c6d3a5ed0e0d804db53153e4f49907e33c7058798f5915692f35f81c\": container with ID starting with da6b7987c6d3a5ed0e0d804db53153e4f49907e33c7058798f5915692f35f81c not found: ID does not exist"
Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.319357 4792 scope.go:117] "RemoveContainer" containerID="ceff8563ab8e13fe2d563693b8224faf4e967d4845279961bbc1a4238bfc1975"
Jan 21 18:55:41 crc kubenswrapper[4792]: E0121 18:55:41.319808 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ceff8563ab8e13fe2d563693b8224faf4e967d4845279961bbc1a4238bfc1975\": container with ID starting with ceff8563ab8e13fe2d563693b8224faf4e967d4845279961bbc1a4238bfc1975 not found: ID does not exist" containerID="ceff8563ab8e13fe2d563693b8224faf4e967d4845279961bbc1a4238bfc1975"
Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.319885 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ceff8563ab8e13fe2d563693b8224faf4e967d4845279961bbc1a4238bfc1975"} err="failed to get container status \"ceff8563ab8e13fe2d563693b8224faf4e967d4845279961bbc1a4238bfc1975\": rpc error: code = NotFound desc = could not find container \"ceff8563ab8e13fe2d563693b8224faf4e967d4845279961bbc1a4238bfc1975\": container with ID starting with ceff8563ab8e13fe2d563693b8224faf4e967d4845279961bbc1a4238bfc1975 not found: ID does not exist"
Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.319906 4792 scope.go:117] "RemoveContainer" containerID="18bedf16aed1962af2a26d4d32ed3572a5c503f2b55239ec96b65c844b089441"
Jan 21 18:55:41 crc kubenswrapper[4792]: E0121 18:55:41.320339 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18bedf16aed1962af2a26d4d32ed3572a5c503f2b55239ec96b65c844b089441\": container with ID starting with 18bedf16aed1962af2a26d4d32ed3572a5c503f2b55239ec96b65c844b089441 not found: ID does not exist" containerID="18bedf16aed1962af2a26d4d32ed3572a5c503f2b55239ec96b65c844b089441"
Jan 21 18:55:41 crc kubenswrapper[4792]: I0121 18:55:41.320364 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18bedf16aed1962af2a26d4d32ed3572a5c503f2b55239ec96b65c844b089441"} err="failed to get container status \"18bedf16aed1962af2a26d4d32ed3572a5c503f2b55239ec96b65c844b089441\": rpc error: code = NotFound desc = could not find container \"18bedf16aed1962af2a26d4d32ed3572a5c503f2b55239ec96b65c844b089441\": container with ID starting with 18bedf16aed1962af2a26d4d32ed3572a5c503f2b55239ec96b65c844b089441 not found: ID does not exist"
Jan 21 18:55:42 crc kubenswrapper[4792]: I0121 18:55:42.265963 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="386d0c5b-e1e5-4c77-8c28-f064a7585e20" path="/var/lib/kubelet/pods/386d0c5b-e1e5-4c77-8c28-f064a7585e20/volumes"
Jan 21 18:55:45 crc kubenswrapper[4792]: I0121 18:55:45.715449 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-x4lxj"]
Jan 21 18:55:45 crc kubenswrapper[4792]: I0121 18:55:45.715803 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/service-telemetry-framework-operators-x4lxj" podUID="7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d" containerName="registry-server" containerID="cri-o://1724025b75d06497e8d0d87c5afffee2ec2874f390cfc631505993ec442e7867" gracePeriod=2
Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-x4lxj" Jan 21 18:55:46 crc kubenswrapper[4792]: I0121 18:55:46.250551 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bxrt8\" (UniqueName: \"kubernetes.io/projected/7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d-kube-api-access-bxrt8\") pod \"7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d\" (UID: \"7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d\") " Jan 21 18:55:46 crc kubenswrapper[4792]: I0121 18:55:46.270532 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d-kube-api-access-bxrt8" (OuterVolumeSpecName: "kube-api-access-bxrt8") pod "7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d" (UID: "7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d"). InnerVolumeSpecName "kube-api-access-bxrt8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:55:46 crc kubenswrapper[4792]: I0121 18:55:46.288080 4792 generic.go:334] "Generic (PLEG): container finished" podID="7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d" containerID="1724025b75d06497e8d0d87c5afffee2ec2874f390cfc631505993ec442e7867" exitCode=0 Jan 21 18:55:46 crc kubenswrapper[4792]: I0121 18:55:46.288147 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-x4lxj" event={"ID":"7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d","Type":"ContainerDied","Data":"1724025b75d06497e8d0d87c5afffee2ec2874f390cfc631505993ec442e7867"} Jan 21 18:55:46 crc kubenswrapper[4792]: I0121 18:55:46.288184 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-x4lxj" Jan 21 18:55:46 crc kubenswrapper[4792]: I0121 18:55:46.288208 4792 scope.go:117] "RemoveContainer" containerID="1724025b75d06497e8d0d87c5afffee2ec2874f390cfc631505993ec442e7867" Jan 21 18:55:46 crc kubenswrapper[4792]: I0121 18:55:46.288188 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-x4lxj" event={"ID":"7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d","Type":"ContainerDied","Data":"25fce65d0a4fe77245a3e28950651ed31847e0f9a93f50e656bac87f41a02104"} Jan 21 18:55:46 crc kubenswrapper[4792]: I0121 18:55:46.313026 4792 scope.go:117] "RemoveContainer" containerID="1724025b75d06497e8d0d87c5afffee2ec2874f390cfc631505993ec442e7867" Jan 21 18:55:46 crc kubenswrapper[4792]: E0121 18:55:46.313744 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1724025b75d06497e8d0d87c5afffee2ec2874f390cfc631505993ec442e7867\": container with ID starting with 1724025b75d06497e8d0d87c5afffee2ec2874f390cfc631505993ec442e7867 not found: ID does not exist" containerID="1724025b75d06497e8d0d87c5afffee2ec2874f390cfc631505993ec442e7867" Jan 21 18:55:46 crc kubenswrapper[4792]: I0121 18:55:46.313838 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1724025b75d06497e8d0d87c5afffee2ec2874f390cfc631505993ec442e7867"} err="failed to get container status \"1724025b75d06497e8d0d87c5afffee2ec2874f390cfc631505993ec442e7867\": rpc error: code = NotFound desc = could not find container \"1724025b75d06497e8d0d87c5afffee2ec2874f390cfc631505993ec442e7867\": container with ID starting with 1724025b75d06497e8d0d87c5afffee2ec2874f390cfc631505993ec442e7867 not found: ID does not exist" Jan 21 18:55:46 crc kubenswrapper[4792]: I0121 18:55:46.338725 4792 
Jan 21 18:55:46 crc kubenswrapper[4792]: I0121 18:55:46.338725 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-x4lxj"]
Jan 21 18:55:46 crc kubenswrapper[4792]: I0121 18:55:46.345362 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-x4lxj"]
Jan 21 18:55:46 crc kubenswrapper[4792]: I0121 18:55:46.353945 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bxrt8\" (UniqueName: \"kubernetes.io/projected/7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d-kube-api-access-bxrt8\") on node \"crc\" DevicePath \"\""
Jan 21 18:55:48 crc kubenswrapper[4792]: I0121 18:55:48.281964 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d" path="/var/lib/kubelet/pods/7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d/volumes"
Jan 21 18:56:23 crc kubenswrapper[4792]: I0121 18:56:23.571742 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 21 18:56:23 crc kubenswrapper[4792]: I0121 18:56:23.572455 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 21 18:56:53 crc kubenswrapper[4792]: I0121 18:56:53.570821 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 21 18:56:53 crc kubenswrapper[4792]: I0121 18:56:53.571565 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 21 18:57:23 crc kubenswrapper[4792]: I0121 18:57:23.570727 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 21 18:57:23 crc kubenswrapper[4792]: I0121 18:57:23.571268 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 21 18:57:23 crc kubenswrapper[4792]: I0121 18:57:23.571318 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x"
Jan 21 18:57:23 crc kubenswrapper[4792]: I0121 18:57:23.571979 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505"} pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 21 18:57:23 crc kubenswrapper[4792]: I0121 18:57:23.572031 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" containerID="cri-o://87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505" gracePeriod=600
Jan 21 18:57:23 crc kubenswrapper[4792]: E0121 18:57:23.707154 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:57:24 crc kubenswrapper[4792]: I0121 18:57:24.140047 4792 generic.go:334] "Generic (PLEG): container finished" podID="759f2e21-e44e-4049-b262-cb49448e22ab" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505" exitCode=0
Jan 21 18:57:24 crc kubenswrapper[4792]: I0121 18:57:24.140096 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerDied","Data":"87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505"}
Jan 21 18:57:24 crc kubenswrapper[4792]: I0121 18:57:24.140156 4792 scope.go:117] "RemoveContainer" containerID="2ae5031e31af34a83cc266d378bcb929d6894824e12d0af9fda531402379b4da"
Jan 21 18:57:24 crc kubenswrapper[4792]: I0121 18:57:24.140798 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505"
Jan 21 18:57:24 crc kubenswrapper[4792]: E0121 18:57:24.141061 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:57:39 crc kubenswrapper[4792]: I0121 18:57:39.247188 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505"
Jan 21 18:57:39 crc kubenswrapper[4792]: E0121 18:57:39.248220 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:57:54 crc kubenswrapper[4792]: I0121 18:57:54.247425 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505"
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:58:05 crc kubenswrapper[4792]: I0121 18:58:05.247488 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505" Jan 21 18:58:05 crc kubenswrapper[4792]: E0121 18:58:05.248543 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:58:18 crc kubenswrapper[4792]: I0121 18:58:18.247012 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505" Jan 21 18:58:18 crc kubenswrapper[4792]: E0121 18:58:18.247689 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:58:32 crc kubenswrapper[4792]: I0121 18:58:32.250130 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505" Jan 21 18:58:32 crc kubenswrapper[4792]: E0121 18:58:32.250749 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:58:45 crc kubenswrapper[4792]: I0121 18:58:45.247763 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505" Jan 21 18:58:45 crc kubenswrapper[4792]: E0121 18:58:45.248336 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:58:56 crc kubenswrapper[4792]: I0121 18:58:56.252586 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505" Jan 21 18:58:56 crc kubenswrapper[4792]: E0121 18:58:56.255064 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Jan 21 18:58:56 crc kubenswrapper[4792]: E0121 18:58:56.255064 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:59:09 crc kubenswrapper[4792]: I0121 18:59:09.246507 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505"
Jan 21 18:59:09 crc kubenswrapper[4792]: E0121 18:59:09.247186 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:59:12 crc kubenswrapper[4792]: I0121 18:59:12.913103 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zclvs"]
Jan 21 18:59:12 crc kubenswrapper[4792]: E0121 18:59:12.914032 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="386d0c5b-e1e5-4c77-8c28-f064a7585e20" containerName="extract-content"
Jan 21 18:59:12 crc kubenswrapper[4792]: I0121 18:59:12.914053 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="386d0c5b-e1e5-4c77-8c28-f064a7585e20" containerName="extract-content"
Jan 21 18:59:12 crc kubenswrapper[4792]: E0121 18:59:12.914074 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="386d0c5b-e1e5-4c77-8c28-f064a7585e20" containerName="extract-utilities"
Jan 21 18:59:12 crc kubenswrapper[4792]: I0121 18:59:12.914082 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="386d0c5b-e1e5-4c77-8c28-f064a7585e20" containerName="extract-utilities"
Jan 21 18:59:12 crc kubenswrapper[4792]: E0121 18:59:12.914097 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="386d0c5b-e1e5-4c77-8c28-f064a7585e20" containerName="registry-server"
Jan 21 18:59:12 crc kubenswrapper[4792]: I0121 18:59:12.914105 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="386d0c5b-e1e5-4c77-8c28-f064a7585e20" containerName="registry-server"
Jan 21 18:59:12 crc kubenswrapper[4792]: E0121 18:59:12.914118 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d" containerName="registry-server"
Jan 21 18:59:12 crc kubenswrapper[4792]: I0121 18:59:12.914126 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d" containerName="registry-server"
Jan 21 18:59:12 crc kubenswrapper[4792]: I0121 18:59:12.914292 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b65c664-2f16-4ef2-b1a1-73e3f0ffc33d" containerName="registry-server"
Jan 21 18:59:12 crc kubenswrapper[4792]: I0121 18:59:12.914312 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="386d0c5b-e1e5-4c77-8c28-f064a7585e20" containerName="registry-server"
Need to start a new one" pod="openshift-marketplace/certified-operators-zclvs" Jan 21 18:59:12 crc kubenswrapper[4792]: I0121 18:59:12.949498 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zclvs"] Jan 21 18:59:13 crc kubenswrapper[4792]: I0121 18:59:13.070563 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb0fa42f-4c89-40dd-a140-d5f6cb34199f-catalog-content\") pod \"certified-operators-zclvs\" (UID: \"eb0fa42f-4c89-40dd-a140-d5f6cb34199f\") " pod="openshift-marketplace/certified-operators-zclvs" Jan 21 18:59:13 crc kubenswrapper[4792]: I0121 18:59:13.070695 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb0fa42f-4c89-40dd-a140-d5f6cb34199f-utilities\") pod \"certified-operators-zclvs\" (UID: \"eb0fa42f-4c89-40dd-a140-d5f6cb34199f\") " pod="openshift-marketplace/certified-operators-zclvs" Jan 21 18:59:13 crc kubenswrapper[4792]: I0121 18:59:13.070727 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rscfq\" (UniqueName: \"kubernetes.io/projected/eb0fa42f-4c89-40dd-a140-d5f6cb34199f-kube-api-access-rscfq\") pod \"certified-operators-zclvs\" (UID: \"eb0fa42f-4c89-40dd-a140-d5f6cb34199f\") " pod="openshift-marketplace/certified-operators-zclvs" Jan 21 18:59:13 crc kubenswrapper[4792]: I0121 18:59:13.172107 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb0fa42f-4c89-40dd-a140-d5f6cb34199f-catalog-content\") pod \"certified-operators-zclvs\" (UID: \"eb0fa42f-4c89-40dd-a140-d5f6cb34199f\") " pod="openshift-marketplace/certified-operators-zclvs" Jan 21 18:59:13 crc kubenswrapper[4792]: I0121 18:59:13.172202 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb0fa42f-4c89-40dd-a140-d5f6cb34199f-utilities\") pod \"certified-operators-zclvs\" (UID: \"eb0fa42f-4c89-40dd-a140-d5f6cb34199f\") " pod="openshift-marketplace/certified-operators-zclvs" Jan 21 18:59:13 crc kubenswrapper[4792]: I0121 18:59:13.172226 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rscfq\" (UniqueName: \"kubernetes.io/projected/eb0fa42f-4c89-40dd-a140-d5f6cb34199f-kube-api-access-rscfq\") pod \"certified-operators-zclvs\" (UID: \"eb0fa42f-4c89-40dd-a140-d5f6cb34199f\") " pod="openshift-marketplace/certified-operators-zclvs" Jan 21 18:59:13 crc kubenswrapper[4792]: I0121 18:59:13.172648 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb0fa42f-4c89-40dd-a140-d5f6cb34199f-catalog-content\") pod \"certified-operators-zclvs\" (UID: \"eb0fa42f-4c89-40dd-a140-d5f6cb34199f\") " pod="openshift-marketplace/certified-operators-zclvs" Jan 21 18:59:13 crc kubenswrapper[4792]: I0121 18:59:13.172705 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb0fa42f-4c89-40dd-a140-d5f6cb34199f-utilities\") pod \"certified-operators-zclvs\" (UID: \"eb0fa42f-4c89-40dd-a140-d5f6cb34199f\") " pod="openshift-marketplace/certified-operators-zclvs" Jan 21 18:59:13 crc kubenswrapper[4792]: I0121 18:59:13.203241 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rscfq\" (UniqueName: \"kubernetes.io/projected/eb0fa42f-4c89-40dd-a140-d5f6cb34199f-kube-api-access-rscfq\") pod \"certified-operators-zclvs\" (UID: \"eb0fa42f-4c89-40dd-a140-d5f6cb34199f\") " pod="openshift-marketplace/certified-operators-zclvs" Jan 21 18:59:13 crc kubenswrapper[4792]: I0121 18:59:13.240663 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zclvs" Jan 21 18:59:13 crc kubenswrapper[4792]: I0121 18:59:13.768347 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zclvs"] Jan 21 18:59:14 crc kubenswrapper[4792]: I0121 18:59:14.104437 4792 generic.go:334] "Generic (PLEG): container finished" podID="eb0fa42f-4c89-40dd-a140-d5f6cb34199f" containerID="51ae57c84401aa30fd1fc113bab22cfb079830ea02d361050e48d35f69ca4fe8" exitCode=0 Jan 21 18:59:14 crc kubenswrapper[4792]: I0121 18:59:14.104493 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zclvs" event={"ID":"eb0fa42f-4c89-40dd-a140-d5f6cb34199f","Type":"ContainerDied","Data":"51ae57c84401aa30fd1fc113bab22cfb079830ea02d361050e48d35f69ca4fe8"} Jan 21 18:59:14 crc kubenswrapper[4792]: I0121 18:59:14.104550 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zclvs" event={"ID":"eb0fa42f-4c89-40dd-a140-d5f6cb34199f","Type":"ContainerStarted","Data":"afb68b972fe0cdafc13b452d3692d2aa25f4dc01cd1ba6808ff68629f2ae9444"} Jan 21 18:59:15 crc kubenswrapper[4792]: I0121 18:59:15.113228 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zclvs" event={"ID":"eb0fa42f-4c89-40dd-a140-d5f6cb34199f","Type":"ContainerStarted","Data":"3de57ac3b256dca77fbeb563e55746cf86e98f04e25e2ce5bfca912d78a0f8a8"} Jan 21 18:59:16 crc kubenswrapper[4792]: I0121 18:59:16.132479 4792 generic.go:334] "Generic (PLEG): container finished" podID="eb0fa42f-4c89-40dd-a140-d5f6cb34199f" containerID="3de57ac3b256dca77fbeb563e55746cf86e98f04e25e2ce5bfca912d78a0f8a8" exitCode=0 Jan 21 18:59:16 crc kubenswrapper[4792]: I0121 18:59:16.132525 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zclvs" event={"ID":"eb0fa42f-4c89-40dd-a140-d5f6cb34199f","Type":"ContainerDied","Data":"3de57ac3b256dca77fbeb563e55746cf86e98f04e25e2ce5bfca912d78a0f8a8"} Jan 21 18:59:17 crc kubenswrapper[4792]: I0121 18:59:17.159677 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zclvs" event={"ID":"eb0fa42f-4c89-40dd-a140-d5f6cb34199f","Type":"ContainerStarted","Data":"1c217522222904b7bd1dca4acb1a41c1a76e533799259ecb962b8cf690d1d555"} Jan 21 18:59:22 crc kubenswrapper[4792]: I0121 18:59:22.247541 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505" Jan 21 18:59:22 crc kubenswrapper[4792]: E0121 18:59:22.249033 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 18:59:23 crc kubenswrapper[4792]: I0121 18:59:23.242701 4792 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zclvs" Jan 21 18:59:23 crc kubenswrapper[4792]: I0121 18:59:23.242802 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zclvs" Jan 21 18:59:23 crc kubenswrapper[4792]: I0121 18:59:23.295584 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zclvs" Jan 21 18:59:23 crc kubenswrapper[4792]: I0121 18:59:23.316237 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zclvs" podStartSLOduration=8.860346392 podStartE2EDuration="11.316217304s" podCreationTimestamp="2026-01-21 18:59:12 +0000 UTC" firstStartedPulling="2026-01-21 18:59:14.106300734 +0000 UTC m=+3788.088263920" lastFinishedPulling="2026-01-21 18:59:16.562171636 +0000 UTC m=+3790.544134832" observedRunningTime="2026-01-21 18:59:17.183716849 +0000 UTC m=+3791.165680035" watchObservedRunningTime="2026-01-21 18:59:23.316217304 +0000 UTC m=+3797.298180490" Jan 21 18:59:24 crc kubenswrapper[4792]: I0121 18:59:24.255100 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zclvs" Jan 21 18:59:24 crc kubenswrapper[4792]: I0121 18:59:24.302611 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zclvs"] Jan 21 18:59:26 crc kubenswrapper[4792]: I0121 18:59:26.217971 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zclvs" podUID="eb0fa42f-4c89-40dd-a140-d5f6cb34199f" containerName="registry-server" containerID="cri-o://1c217522222904b7bd1dca4acb1a41c1a76e533799259ecb962b8cf690d1d555" gracePeriod=2 Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.062813 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zclvs" Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.235382 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rscfq\" (UniqueName: \"kubernetes.io/projected/eb0fa42f-4c89-40dd-a140-d5f6cb34199f-kube-api-access-rscfq\") pod \"eb0fa42f-4c89-40dd-a140-d5f6cb34199f\" (UID: \"eb0fa42f-4c89-40dd-a140-d5f6cb34199f\") " Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.235439 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb0fa42f-4c89-40dd-a140-d5f6cb34199f-catalog-content\") pod \"eb0fa42f-4c89-40dd-a140-d5f6cb34199f\" (UID: \"eb0fa42f-4c89-40dd-a140-d5f6cb34199f\") " Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.235470 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb0fa42f-4c89-40dd-a140-d5f6cb34199f-utilities\") pod \"eb0fa42f-4c89-40dd-a140-d5f6cb34199f\" (UID: \"eb0fa42f-4c89-40dd-a140-d5f6cb34199f\") " Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.239308 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb0fa42f-4c89-40dd-a140-d5f6cb34199f-utilities" (OuterVolumeSpecName: "utilities") pod "eb0fa42f-4c89-40dd-a140-d5f6cb34199f" (UID: "eb0fa42f-4c89-40dd-a140-d5f6cb34199f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.240891 4792 generic.go:334] "Generic (PLEG): container finished" podID="eb0fa42f-4c89-40dd-a140-d5f6cb34199f" containerID="1c217522222904b7bd1dca4acb1a41c1a76e533799259ecb962b8cf690d1d555" exitCode=0 Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.240966 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zclvs" event={"ID":"eb0fa42f-4c89-40dd-a140-d5f6cb34199f","Type":"ContainerDied","Data":"1c217522222904b7bd1dca4acb1a41c1a76e533799259ecb962b8cf690d1d555"} Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.241004 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zclvs" event={"ID":"eb0fa42f-4c89-40dd-a140-d5f6cb34199f","Type":"ContainerDied","Data":"afb68b972fe0cdafc13b452d3692d2aa25f4dc01cd1ba6808ff68629f2ae9444"} Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.241040 4792 scope.go:117] "RemoveContainer" containerID="1c217522222904b7bd1dca4acb1a41c1a76e533799259ecb962b8cf690d1d555" Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.241279 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zclvs" Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.271584 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb0fa42f-4c89-40dd-a140-d5f6cb34199f-kube-api-access-rscfq" (OuterVolumeSpecName: "kube-api-access-rscfq") pod "eb0fa42f-4c89-40dd-a140-d5f6cb34199f" (UID: "eb0fa42f-4c89-40dd-a140-d5f6cb34199f"). InnerVolumeSpecName "kube-api-access-rscfq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.286262 4792 scope.go:117] "RemoveContainer" containerID="3de57ac3b256dca77fbeb563e55746cf86e98f04e25e2ce5bfca912d78a0f8a8" Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.297705 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb0fa42f-4c89-40dd-a140-d5f6cb34199f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eb0fa42f-4c89-40dd-a140-d5f6cb34199f" (UID: "eb0fa42f-4c89-40dd-a140-d5f6cb34199f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.323203 4792 scope.go:117] "RemoveContainer" containerID="51ae57c84401aa30fd1fc113bab22cfb079830ea02d361050e48d35f69ca4fe8" Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.337758 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rscfq\" (UniqueName: \"kubernetes.io/projected/eb0fa42f-4c89-40dd-a140-d5f6cb34199f-kube-api-access-rscfq\") on node \"crc\" DevicePath \"\"" Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.337805 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb0fa42f-4c89-40dd-a140-d5f6cb34199f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.337821 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb0fa42f-4c89-40dd-a140-d5f6cb34199f-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.339692 4792 scope.go:117] "RemoveContainer" containerID="1c217522222904b7bd1dca4acb1a41c1a76e533799259ecb962b8cf690d1d555" Jan 21 18:59:27 crc kubenswrapper[4792]: E0121 18:59:27.340124 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c217522222904b7bd1dca4acb1a41c1a76e533799259ecb962b8cf690d1d555\": container with ID starting with 1c217522222904b7bd1dca4acb1a41c1a76e533799259ecb962b8cf690d1d555 not found: ID does not exist" containerID="1c217522222904b7bd1dca4acb1a41c1a76e533799259ecb962b8cf690d1d555" Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.340163 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c217522222904b7bd1dca4acb1a41c1a76e533799259ecb962b8cf690d1d555"} err="failed to get container status \"1c217522222904b7bd1dca4acb1a41c1a76e533799259ecb962b8cf690d1d555\": rpc error: code = NotFound desc = could not find container \"1c217522222904b7bd1dca4acb1a41c1a76e533799259ecb962b8cf690d1d555\": container with ID starting with 1c217522222904b7bd1dca4acb1a41c1a76e533799259ecb962b8cf690d1d555 not found: ID does not exist" Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.340191 4792 scope.go:117] "RemoveContainer" containerID="3de57ac3b256dca77fbeb563e55746cf86e98f04e25e2ce5bfca912d78a0f8a8" Jan 21 18:59:27 crc kubenswrapper[4792]: E0121 18:59:27.340490 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3de57ac3b256dca77fbeb563e55746cf86e98f04e25e2ce5bfca912d78a0f8a8\": container with ID starting with 3de57ac3b256dca77fbeb563e55746cf86e98f04e25e2ce5bfca912d78a0f8a8 not found: ID does not exist" containerID="3de57ac3b256dca77fbeb563e55746cf86e98f04e25e2ce5bfca912d78a0f8a8" Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.340512 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3de57ac3b256dca77fbeb563e55746cf86e98f04e25e2ce5bfca912d78a0f8a8"} err="failed to get container status \"3de57ac3b256dca77fbeb563e55746cf86e98f04e25e2ce5bfca912d78a0f8a8\": rpc error: code = NotFound desc = could not find container \"3de57ac3b256dca77fbeb563e55746cf86e98f04e25e2ce5bfca912d78a0f8a8\": container with ID starting with 3de57ac3b256dca77fbeb563e55746cf86e98f04e25e2ce5bfca912d78a0f8a8 not found: ID does not exist" Jan 21 18:59:27 crc 
Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.340526 4792 scope.go:117] "RemoveContainer" containerID="51ae57c84401aa30fd1fc113bab22cfb079830ea02d361050e48d35f69ca4fe8"
Jan 21 18:59:27 crc kubenswrapper[4792]: E0121 18:59:27.340872 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51ae57c84401aa30fd1fc113bab22cfb079830ea02d361050e48d35f69ca4fe8\": container with ID starting with 51ae57c84401aa30fd1fc113bab22cfb079830ea02d361050e48d35f69ca4fe8 not found: ID does not exist" containerID="51ae57c84401aa30fd1fc113bab22cfb079830ea02d361050e48d35f69ca4fe8"
Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.340899 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51ae57c84401aa30fd1fc113bab22cfb079830ea02d361050e48d35f69ca4fe8"} err="failed to get container status \"51ae57c84401aa30fd1fc113bab22cfb079830ea02d361050e48d35f69ca4fe8\": rpc error: code = NotFound desc = could not find container \"51ae57c84401aa30fd1fc113bab22cfb079830ea02d361050e48d35f69ca4fe8\": container with ID starting with 51ae57c84401aa30fd1fc113bab22cfb079830ea02d361050e48d35f69ca4fe8 not found: ID does not exist"
Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.579054 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zclvs"]
Jan 21 18:59:27 crc kubenswrapper[4792]: I0121 18:59:27.586323 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zclvs"]
Jan 21 18:59:28 crc kubenswrapper[4792]: I0121 18:59:28.254696 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb0fa42f-4c89-40dd-a140-d5f6cb34199f" path="/var/lib/kubelet/pods/eb0fa42f-4c89-40dd-a140-d5f6cb34199f/volumes"
Jan 21 18:59:36 crc kubenswrapper[4792]: I0121 18:59:36.253517 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505"
Jan 21 18:59:36 crc kubenswrapper[4792]: E0121 18:59:36.254313 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 18:59:48 crc kubenswrapper[4792]: I0121 18:59:48.247110 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505"
Jan 21 18:59:48 crc kubenswrapper[4792]: E0121 18:59:48.247801 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 19:00:00 crc kubenswrapper[4792]: I0121 19:00:00.161528 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483700-gt66c"]
Jan 21 19:00:00 crc kubenswrapper[4792]: E0121 19:00:00.162334 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb0fa42f-4c89-40dd-a140-d5f6cb34199f" containerName="extract-utilities"
Jan 21 19:00:00 crc kubenswrapper[4792]: I0121 19:00:00.162346 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb0fa42f-4c89-40dd-a140-d5f6cb34199f" containerName="extract-utilities"
Jan 21 19:00:00 crc kubenswrapper[4792]: E0121 19:00:00.162355 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb0fa42f-4c89-40dd-a140-d5f6cb34199f" containerName="extract-content"
Jan 21 19:00:00 crc kubenswrapper[4792]: I0121 19:00:00.162361 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb0fa42f-4c89-40dd-a140-d5f6cb34199f" containerName="extract-content"
Jan 21 19:00:00 crc kubenswrapper[4792]: E0121 19:00:00.162371 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb0fa42f-4c89-40dd-a140-d5f6cb34199f" containerName="registry-server"
Jan 21 19:00:00 crc kubenswrapper[4792]: I0121 19:00:00.162377 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb0fa42f-4c89-40dd-a140-d5f6cb34199f" containerName="registry-server"
Jan 21 19:00:00 crc kubenswrapper[4792]: I0121 19:00:00.162494 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb0fa42f-4c89-40dd-a140-d5f6cb34199f" containerName="registry-server"
Jan 21 19:00:00 crc kubenswrapper[4792]: I0121 19:00:00.162955 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-gt66c"
Jan 21 19:00:00 crc kubenswrapper[4792]: I0121 19:00:00.165180 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 21 19:00:00 crc kubenswrapper[4792]: I0121 19:00:00.165825 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 21 19:00:00 crc kubenswrapper[4792]: I0121 19:00:00.183082 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483700-gt66c"]
Jan 21 19:00:00 crc kubenswrapper[4792]: I0121 19:00:00.247349 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505"
Jan 21 19:00:00 crc kubenswrapper[4792]: E0121 19:00:00.247633 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 19:00:00 crc kubenswrapper[4792]: I0121 19:00:00.317473 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c665db87-7eb0-4414-8e85-4d1643ebec60-config-volume\") pod \"collect-profiles-29483700-gt66c\" (UID: \"c665db87-7eb0-4414-8e85-4d1643ebec60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-gt66c"
Jan 21 19:00:00 crc kubenswrapper[4792]: I0121 19:00:00.317543 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c665db87-7eb0-4414-8e85-4d1643ebec60-secret-volume\") pod \"collect-profiles-29483700-gt66c\" (UID: \"c665db87-7eb0-4414-8e85-4d1643ebec60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-gt66c"
Jan 21 19:00:00 crc kubenswrapper[4792]: I0121 19:00:00.317702 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsdkg\" (UniqueName: \"kubernetes.io/projected/c665db87-7eb0-4414-8e85-4d1643ebec60-kube-api-access-nsdkg\") pod \"collect-profiles-29483700-gt66c\" (UID: \"c665db87-7eb0-4414-8e85-4d1643ebec60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-gt66c"
Jan 21 19:00:00 crc kubenswrapper[4792]: I0121 19:00:00.419692 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c665db87-7eb0-4414-8e85-4d1643ebec60-secret-volume\") pod \"collect-profiles-29483700-gt66c\" (UID: \"c665db87-7eb0-4414-8e85-4d1643ebec60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-gt66c"
Jan 21 19:00:00 crc kubenswrapper[4792]: I0121 19:00:00.419771 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsdkg\" (UniqueName: \"kubernetes.io/projected/c665db87-7eb0-4414-8e85-4d1643ebec60-kube-api-access-nsdkg\") pod \"collect-profiles-29483700-gt66c\" (UID: \"c665db87-7eb0-4414-8e85-4d1643ebec60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-gt66c"
Jan 21 19:00:00 crc kubenswrapper[4792]: I0121 19:00:00.419883 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c665db87-7eb0-4414-8e85-4d1643ebec60-config-volume\") pod \"collect-profiles-29483700-gt66c\" (UID: \"c665db87-7eb0-4414-8e85-4d1643ebec60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-gt66c"
Jan 21 19:00:00 crc kubenswrapper[4792]: I0121 19:00:00.420648 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c665db87-7eb0-4414-8e85-4d1643ebec60-config-volume\") pod \"collect-profiles-29483700-gt66c\" (UID: \"c665db87-7eb0-4414-8e85-4d1643ebec60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-gt66c"
Jan 21 19:00:00 crc kubenswrapper[4792]: I0121 19:00:00.440062 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsdkg\" (UniqueName: \"kubernetes.io/projected/c665db87-7eb0-4414-8e85-4d1643ebec60-kube-api-access-nsdkg\") pod \"collect-profiles-29483700-gt66c\" (UID: \"c665db87-7eb0-4414-8e85-4d1643ebec60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-gt66c"
Jan 21 19:00:00 crc kubenswrapper[4792]: I0121 19:00:00.440694 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c665db87-7eb0-4414-8e85-4d1643ebec60-secret-volume\") pod \"collect-profiles-29483700-gt66c\" (UID: \"c665db87-7eb0-4414-8e85-4d1643ebec60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-gt66c"
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-gt66c" Jan 21 19:00:00 crc kubenswrapper[4792]: I0121 19:00:00.915801 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483700-gt66c"] Jan 21 19:00:01 crc kubenswrapper[4792]: I0121 19:00:01.484820 4792 generic.go:334] "Generic (PLEG): container finished" podID="c665db87-7eb0-4414-8e85-4d1643ebec60" containerID="21c0642dcd5f4d537bd5013e60872248686462f4976e2ca83bd2d2a15e9ce49a" exitCode=0 Jan 21 19:00:01 crc kubenswrapper[4792]: I0121 19:00:01.484901 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-gt66c" event={"ID":"c665db87-7eb0-4414-8e85-4d1643ebec60","Type":"ContainerDied","Data":"21c0642dcd5f4d537bd5013e60872248686462f4976e2ca83bd2d2a15e9ce49a"} Jan 21 19:00:01 crc kubenswrapper[4792]: I0121 19:00:01.485185 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-gt66c" event={"ID":"c665db87-7eb0-4414-8e85-4d1643ebec60","Type":"ContainerStarted","Data":"7664698d225a1530ae695413509a2d2017dceb87a7aeaee778be853dfac79322"} Jan 21 19:00:02 crc kubenswrapper[4792]: I0121 19:00:02.733587 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-gt66c" Jan 21 19:00:02 crc kubenswrapper[4792]: I0121 19:00:02.852278 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c665db87-7eb0-4414-8e85-4d1643ebec60-secret-volume\") pod \"c665db87-7eb0-4414-8e85-4d1643ebec60\" (UID: \"c665db87-7eb0-4414-8e85-4d1643ebec60\") " Jan 21 19:00:02 crc kubenswrapper[4792]: I0121 19:00:02.852590 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c665db87-7eb0-4414-8e85-4d1643ebec60-config-volume\") pod \"c665db87-7eb0-4414-8e85-4d1643ebec60\" (UID: \"c665db87-7eb0-4414-8e85-4d1643ebec60\") " Jan 21 19:00:02 crc kubenswrapper[4792]: I0121 19:00:02.852773 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nsdkg\" (UniqueName: \"kubernetes.io/projected/c665db87-7eb0-4414-8e85-4d1643ebec60-kube-api-access-nsdkg\") pod \"c665db87-7eb0-4414-8e85-4d1643ebec60\" (UID: \"c665db87-7eb0-4414-8e85-4d1643ebec60\") " Jan 21 19:00:02 crc kubenswrapper[4792]: I0121 19:00:02.854308 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c665db87-7eb0-4414-8e85-4d1643ebec60-config-volume" (OuterVolumeSpecName: "config-volume") pod "c665db87-7eb0-4414-8e85-4d1643ebec60" (UID: "c665db87-7eb0-4414-8e85-4d1643ebec60"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 19:00:02 crc kubenswrapper[4792]: I0121 19:00:02.858101 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c665db87-7eb0-4414-8e85-4d1643ebec60-kube-api-access-nsdkg" (OuterVolumeSpecName: "kube-api-access-nsdkg") pod "c665db87-7eb0-4414-8e85-4d1643ebec60" (UID: "c665db87-7eb0-4414-8e85-4d1643ebec60"). InnerVolumeSpecName "kube-api-access-nsdkg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 19:00:02 crc kubenswrapper[4792]: I0121 19:00:02.858182 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c665db87-7eb0-4414-8e85-4d1643ebec60-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c665db87-7eb0-4414-8e85-4d1643ebec60" (UID: "c665db87-7eb0-4414-8e85-4d1643ebec60"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 19:00:02 crc kubenswrapper[4792]: I0121 19:00:02.954500 4792 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c665db87-7eb0-4414-8e85-4d1643ebec60-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 21 19:00:02 crc kubenswrapper[4792]: I0121 19:00:02.954533 4792 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c665db87-7eb0-4414-8e85-4d1643ebec60-config-volume\") on node \"crc\" DevicePath \"\"" Jan 21 19:00:02 crc kubenswrapper[4792]: I0121 19:00:02.954544 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nsdkg\" (UniqueName: \"kubernetes.io/projected/c665db87-7eb0-4414-8e85-4d1643ebec60-kube-api-access-nsdkg\") on node \"crc\" DevicePath \"\"" Jan 21 19:00:03 crc kubenswrapper[4792]: I0121 19:00:03.498800 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-gt66c" event={"ID":"c665db87-7eb0-4414-8e85-4d1643ebec60","Type":"ContainerDied","Data":"7664698d225a1530ae695413509a2d2017dceb87a7aeaee778be853dfac79322"} Jan 21 19:00:03 crc kubenswrapper[4792]: I0121 19:00:03.498839 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7664698d225a1530ae695413509a2d2017dceb87a7aeaee778be853dfac79322" Jan 21 19:00:03 crc kubenswrapper[4792]: I0121 19:00:03.499520 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-gt66c" Jan 21 19:00:03 crc kubenswrapper[4792]: I0121 19:00:03.813027 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483655-pgpdz"] Jan 21 19:00:03 crc kubenswrapper[4792]: I0121 19:00:03.822791 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483655-pgpdz"] Jan 21 19:00:04 crc kubenswrapper[4792]: I0121 19:00:04.255529 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54e9171e-bf7c-49e7-8fda-1063833badf9" path="/var/lib/kubelet/pods/54e9171e-bf7c-49e7-8fda-1063833badf9/volumes" Jan 21 19:00:14 crc kubenswrapper[4792]: I0121 19:00:14.247168 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505" Jan 21 19:00:14 crc kubenswrapper[4792]: E0121 19:00:14.248371 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 19:00:15 crc kubenswrapper[4792]: I0121 19:00:15.790905 4792 scope.go:117] "RemoveContainer" containerID="1ebbf21a82c1d6bb6b63d6324077c5be3847620544afe000ece52ae8bcbb4717" Jan 21 19:00:26 crc kubenswrapper[4792]: I0121 19:00:26.256390 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505" Jan 21 19:00:26 crc kubenswrapper[4792]: E0121 19:00:26.257488 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 19:00:41 crc kubenswrapper[4792]: I0121 19:00:41.246665 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505" Jan 21 19:00:41 crc kubenswrapper[4792]: E0121 19:00:41.247356 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 19:00:50 crc kubenswrapper[4792]: I0121 19:00:50.301020 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-framework-operators-l7gd8"] Jan 21 19:00:50 crc kubenswrapper[4792]: E0121 19:00:50.302927 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c665db87-7eb0-4414-8e85-4d1643ebec60" containerName="collect-profiles" Jan 21 19:00:50 crc kubenswrapper[4792]: I0121 19:00:50.303020 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c665db87-7eb0-4414-8e85-4d1643ebec60" containerName="collect-profiles" Jan 21 19:00:50 crc 
Jan 21 19:00:50 crc kubenswrapper[4792]: I0121 19:00:50.303211 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c665db87-7eb0-4414-8e85-4d1643ebec60" containerName="collect-profiles"
Jan 21 19:00:50 crc kubenswrapper[4792]: I0121 19:00:50.303760 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-l7gd8"
Jan 21 19:00:50 crc kubenswrapper[4792]: I0121 19:00:50.315213 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-l7gd8"]
Jan 21 19:00:50 crc kubenswrapper[4792]: I0121 19:00:50.369655 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwdtt\" (UniqueName: \"kubernetes.io/projected/b2ba4d1e-6972-4c10-9572-bc698f3c87eb-kube-api-access-wwdtt\") pod \"service-telemetry-framework-operators-l7gd8\" (UID: \"b2ba4d1e-6972-4c10-9572-bc698f3c87eb\") " pod="service-telemetry/service-telemetry-framework-operators-l7gd8"
Jan 21 19:00:50 crc kubenswrapper[4792]: I0121 19:00:50.471633 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwdtt\" (UniqueName: \"kubernetes.io/projected/b2ba4d1e-6972-4c10-9572-bc698f3c87eb-kube-api-access-wwdtt\") pod \"service-telemetry-framework-operators-l7gd8\" (UID: \"b2ba4d1e-6972-4c10-9572-bc698f3c87eb\") " pod="service-telemetry/service-telemetry-framework-operators-l7gd8"
Jan 21 19:00:50 crc kubenswrapper[4792]: I0121 19:00:50.513777 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwdtt\" (UniqueName: \"kubernetes.io/projected/b2ba4d1e-6972-4c10-9572-bc698f3c87eb-kube-api-access-wwdtt\") pod \"service-telemetry-framework-operators-l7gd8\" (UID: \"b2ba4d1e-6972-4c10-9572-bc698f3c87eb\") " pod="service-telemetry/service-telemetry-framework-operators-l7gd8"
Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-l7gd8" Jan 21 19:00:50 crc kubenswrapper[4792]: I0121 19:00:50.868506 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-l7gd8"] Jan 21 19:00:50 crc kubenswrapper[4792]: I0121 19:00:50.876091 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 19:00:51 crc kubenswrapper[4792]: I0121 19:00:51.844286 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-l7gd8" event={"ID":"b2ba4d1e-6972-4c10-9572-bc698f3c87eb","Type":"ContainerStarted","Data":"33d966f11e4a21a6931990a557abc2a024445d32fa6b35486d067d269b25ddb4"} Jan 21 19:00:51 crc kubenswrapper[4792]: I0121 19:00:51.844602 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-l7gd8" event={"ID":"b2ba4d1e-6972-4c10-9572-bc698f3c87eb","Type":"ContainerStarted","Data":"e81631ae8a47a07e1d593299e9b691f8d609c966352c98011e4e8de1f844dee9"} Jan 21 19:00:51 crc kubenswrapper[4792]: I0121 19:00:51.860798 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-framework-operators-l7gd8" podStartSLOduration=1.749074056 podStartE2EDuration="1.860782617s" podCreationTimestamp="2026-01-21 19:00:50 +0000 UTC" firstStartedPulling="2026-01-21 19:00:50.875762732 +0000 UTC m=+3884.857725918" lastFinishedPulling="2026-01-21 19:00:50.987471293 +0000 UTC m=+3884.969434479" observedRunningTime="2026-01-21 19:00:51.858345401 +0000 UTC m=+3885.840308587" watchObservedRunningTime="2026-01-21 19:00:51.860782617 +0000 UTC m=+3885.842745803" Jan 21 19:00:54 crc kubenswrapper[4792]: I0121 19:00:54.247051 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505" Jan 21 19:00:54 crc kubenswrapper[4792]: E0121 19:00:54.247623 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 19:01:00 crc kubenswrapper[4792]: I0121 19:01:00.629529 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/service-telemetry-framework-operators-l7gd8" Jan 21 19:01:00 crc kubenswrapper[4792]: I0121 19:01:00.630265 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/service-telemetry-framework-operators-l7gd8" Jan 21 19:01:00 crc kubenswrapper[4792]: I0121 19:01:00.665115 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/service-telemetry-framework-operators-l7gd8" Jan 21 19:01:00 crc kubenswrapper[4792]: I0121 19:01:00.944909 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/service-telemetry-framework-operators-l7gd8" Jan 21 19:01:00 crc kubenswrapper[4792]: I0121 19:01:00.987460 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-l7gd8"] Jan 21 19:01:02 crc kubenswrapper[4792]: I0121 19:01:02.937191 4792 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="service-telemetry/service-telemetry-framework-operators-l7gd8" podUID="b2ba4d1e-6972-4c10-9572-bc698f3c87eb" containerName="registry-server" containerID="cri-o://33d966f11e4a21a6931990a557abc2a024445d32fa6b35486d067d269b25ddb4" gracePeriod=2 Jan 21 19:01:03 crc kubenswrapper[4792]: I0121 19:01:03.317445 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-l7gd8" Jan 21 19:01:03 crc kubenswrapper[4792]: I0121 19:01:03.492995 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwdtt\" (UniqueName: \"kubernetes.io/projected/b2ba4d1e-6972-4c10-9572-bc698f3c87eb-kube-api-access-wwdtt\") pod \"b2ba4d1e-6972-4c10-9572-bc698f3c87eb\" (UID: \"b2ba4d1e-6972-4c10-9572-bc698f3c87eb\") " Jan 21 19:01:03 crc kubenswrapper[4792]: I0121 19:01:03.751436 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2ba4d1e-6972-4c10-9572-bc698f3c87eb-kube-api-access-wwdtt" (OuterVolumeSpecName: "kube-api-access-wwdtt") pod "b2ba4d1e-6972-4c10-9572-bc698f3c87eb" (UID: "b2ba4d1e-6972-4c10-9572-bc698f3c87eb"). InnerVolumeSpecName "kube-api-access-wwdtt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 19:01:03 crc kubenswrapper[4792]: I0121 19:01:03.798109 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwdtt\" (UniqueName: \"kubernetes.io/projected/b2ba4d1e-6972-4c10-9572-bc698f3c87eb-kube-api-access-wwdtt\") on node \"crc\" DevicePath \"\"" Jan 21 19:01:03 crc kubenswrapper[4792]: I0121 19:01:03.947280 4792 generic.go:334] "Generic (PLEG): container finished" podID="b2ba4d1e-6972-4c10-9572-bc698f3c87eb" containerID="33d966f11e4a21a6931990a557abc2a024445d32fa6b35486d067d269b25ddb4" exitCode=0 Jan 21 19:01:03 crc kubenswrapper[4792]: I0121 19:01:03.947345 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-l7gd8" event={"ID":"b2ba4d1e-6972-4c10-9572-bc698f3c87eb","Type":"ContainerDied","Data":"33d966f11e4a21a6931990a557abc2a024445d32fa6b35486d067d269b25ddb4"} Jan 21 19:01:03 crc kubenswrapper[4792]: I0121 19:01:03.947423 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-l7gd8" event={"ID":"b2ba4d1e-6972-4c10-9572-bc698f3c87eb","Type":"ContainerDied","Data":"e81631ae8a47a07e1d593299e9b691f8d609c966352c98011e4e8de1f844dee9"} Jan 21 19:01:03 crc kubenswrapper[4792]: I0121 19:01:03.947447 4792 scope.go:117] "RemoveContainer" containerID="33d966f11e4a21a6931990a557abc2a024445d32fa6b35486d067d269b25ddb4" Jan 21 19:01:03 crc kubenswrapper[4792]: I0121 19:01:03.947374 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-l7gd8" Jan 21 19:01:03 crc kubenswrapper[4792]: I0121 19:01:03.983225 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-l7gd8"] Jan 21 19:01:03 crc kubenswrapper[4792]: I0121 19:01:03.983338 4792 scope.go:117] "RemoveContainer" containerID="33d966f11e4a21a6931990a557abc2a024445d32fa6b35486d067d269b25ddb4" Jan 21 19:01:03 crc kubenswrapper[4792]: E0121 19:01:03.984971 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33d966f11e4a21a6931990a557abc2a024445d32fa6b35486d067d269b25ddb4\": container with ID starting with 33d966f11e4a21a6931990a557abc2a024445d32fa6b35486d067d269b25ddb4 not found: ID does not exist" containerID="33d966f11e4a21a6931990a557abc2a024445d32fa6b35486d067d269b25ddb4" Jan 21 19:01:03 crc kubenswrapper[4792]: I0121 19:01:03.985004 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33d966f11e4a21a6931990a557abc2a024445d32fa6b35486d067d269b25ddb4"} err="failed to get container status \"33d966f11e4a21a6931990a557abc2a024445d32fa6b35486d067d269b25ddb4\": rpc error: code = NotFound desc = could not find container \"33d966f11e4a21a6931990a557abc2a024445d32fa6b35486d067d269b25ddb4\": container with ID starting with 33d966f11e4a21a6931990a557abc2a024445d32fa6b35486d067d269b25ddb4 not found: ID does not exist" Jan 21 19:01:03 crc kubenswrapper[4792]: I0121 19:01:03.987061 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-l7gd8"] Jan 21 19:01:04 crc kubenswrapper[4792]: I0121 19:01:04.258732 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2ba4d1e-6972-4c10-9572-bc698f3c87eb" path="/var/lib/kubelet/pods/b2ba4d1e-6972-4c10-9572-bc698f3c87eb/volumes" Jan 21 19:01:08 crc kubenswrapper[4792]: I0121 19:01:08.247691 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505" Jan 21 19:01:08 crc kubenswrapper[4792]: E0121 19:01:08.248199 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 19:01:20 crc kubenswrapper[4792]: I0121 19:01:20.251423 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505" Jan 21 19:01:20 crc kubenswrapper[4792]: E0121 19:01:20.252364 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 19:01:31 crc kubenswrapper[4792]: I0121 19:01:31.246899 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505" Jan 21 19:01:31 crc kubenswrapper[4792]: E0121 19:01:31.247691 4792 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 19:01:43 crc kubenswrapper[4792]: I0121 19:01:43.246551 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505" Jan 21 19:01:43 crc kubenswrapper[4792]: E0121 19:01:43.247286 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 19:01:57 crc kubenswrapper[4792]: I0121 19:01:57.246929 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505" Jan 21 19:01:57 crc kubenswrapper[4792]: E0121 19:01:57.247432 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 19:02:09 crc kubenswrapper[4792]: I0121 19:02:09.246543 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505" Jan 21 19:02:09 crc kubenswrapper[4792]: E0121 19:02:09.247280 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 19:02:22 crc kubenswrapper[4792]: I0121 19:02:22.246650 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505" Jan 21 19:02:22 crc kubenswrapper[4792]: E0121 19:02:22.247469 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" Jan 21 19:02:33 crc kubenswrapper[4792]: I0121 19:02:33.247074 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505" Jan 21 19:02:33 crc kubenswrapper[4792]: I0121 19:02:33.572384 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" 
event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerStarted","Data":"0864a8dd3645aaaa500158cbb8866641993fcaa83f4d5b1564b32da7e767190d"} Jan 21 19:03:44 crc kubenswrapper[4792]: I0121 19:03:44.778061 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dsfvb"] Jan 21 19:03:44 crc kubenswrapper[4792]: E0121 19:03:44.778980 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2ba4d1e-6972-4c10-9572-bc698f3c87eb" containerName="registry-server" Jan 21 19:03:44 crc kubenswrapper[4792]: I0121 19:03:44.778997 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2ba4d1e-6972-4c10-9572-bc698f3c87eb" containerName="registry-server" Jan 21 19:03:44 crc kubenswrapper[4792]: I0121 19:03:44.779173 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2ba4d1e-6972-4c10-9572-bc698f3c87eb" containerName="registry-server" Jan 21 19:03:44 crc kubenswrapper[4792]: I0121 19:03:44.780340 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dsfvb" Jan 21 19:03:44 crc kubenswrapper[4792]: I0121 19:03:44.797920 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dsfvb"] Jan 21 19:03:44 crc kubenswrapper[4792]: I0121 19:03:44.849532 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e66a723-9362-402a-94fa-2eb4b2c0511b-utilities\") pod \"redhat-operators-dsfvb\" (UID: \"0e66a723-9362-402a-94fa-2eb4b2c0511b\") " pod="openshift-marketplace/redhat-operators-dsfvb" Jan 21 19:03:44 crc kubenswrapper[4792]: I0121 19:03:44.849641 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skgws\" (UniqueName: \"kubernetes.io/projected/0e66a723-9362-402a-94fa-2eb4b2c0511b-kube-api-access-skgws\") pod \"redhat-operators-dsfvb\" (UID: \"0e66a723-9362-402a-94fa-2eb4b2c0511b\") " pod="openshift-marketplace/redhat-operators-dsfvb" Jan 21 19:03:44 crc kubenswrapper[4792]: I0121 19:03:44.849690 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e66a723-9362-402a-94fa-2eb4b2c0511b-catalog-content\") pod \"redhat-operators-dsfvb\" (UID: \"0e66a723-9362-402a-94fa-2eb4b2c0511b\") " pod="openshift-marketplace/redhat-operators-dsfvb" Jan 21 19:03:44 crc kubenswrapper[4792]: I0121 19:03:44.951107 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skgws\" (UniqueName: \"kubernetes.io/projected/0e66a723-9362-402a-94fa-2eb4b2c0511b-kube-api-access-skgws\") pod \"redhat-operators-dsfvb\" (UID: \"0e66a723-9362-402a-94fa-2eb4b2c0511b\") " pod="openshift-marketplace/redhat-operators-dsfvb" Jan 21 19:03:44 crc kubenswrapper[4792]: I0121 19:03:44.951175 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e66a723-9362-402a-94fa-2eb4b2c0511b-catalog-content\") pod \"redhat-operators-dsfvb\" (UID: \"0e66a723-9362-402a-94fa-2eb4b2c0511b\") " pod="openshift-marketplace/redhat-operators-dsfvb" Jan 21 19:03:44 crc kubenswrapper[4792]: I0121 19:03:44.951244 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e66a723-9362-402a-94fa-2eb4b2c0511b-utilities\") pod 
\"redhat-operators-dsfvb\" (UID: \"0e66a723-9362-402a-94fa-2eb4b2c0511b\") " pod="openshift-marketplace/redhat-operators-dsfvb" Jan 21 19:03:44 crc kubenswrapper[4792]: I0121 19:03:44.951782 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e66a723-9362-402a-94fa-2eb4b2c0511b-utilities\") pod \"redhat-operators-dsfvb\" (UID: \"0e66a723-9362-402a-94fa-2eb4b2c0511b\") " pod="openshift-marketplace/redhat-operators-dsfvb" Jan 21 19:03:44 crc kubenswrapper[4792]: I0121 19:03:44.951914 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e66a723-9362-402a-94fa-2eb4b2c0511b-catalog-content\") pod \"redhat-operators-dsfvb\" (UID: \"0e66a723-9362-402a-94fa-2eb4b2c0511b\") " pod="openshift-marketplace/redhat-operators-dsfvb" Jan 21 19:03:44 crc kubenswrapper[4792]: I0121 19:03:44.976669 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skgws\" (UniqueName: \"kubernetes.io/projected/0e66a723-9362-402a-94fa-2eb4b2c0511b-kube-api-access-skgws\") pod \"redhat-operators-dsfvb\" (UID: \"0e66a723-9362-402a-94fa-2eb4b2c0511b\") " pod="openshift-marketplace/redhat-operators-dsfvb" Jan 21 19:03:45 crc kubenswrapper[4792]: I0121 19:03:45.103181 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dsfvb" Jan 21 19:03:45 crc kubenswrapper[4792]: I0121 19:03:45.516002 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dsfvb"] Jan 21 19:03:46 crc kubenswrapper[4792]: I0121 19:03:46.085906 4792 generic.go:334] "Generic (PLEG): container finished" podID="0e66a723-9362-402a-94fa-2eb4b2c0511b" containerID="031ae242563a413386fe3e1c907cfb0afdae2442da3bbd2c73216a5e3b8c9d0a" exitCode=0 Jan 21 19:03:46 crc kubenswrapper[4792]: I0121 19:03:46.085948 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dsfvb" event={"ID":"0e66a723-9362-402a-94fa-2eb4b2c0511b","Type":"ContainerDied","Data":"031ae242563a413386fe3e1c907cfb0afdae2442da3bbd2c73216a5e3b8c9d0a"} Jan 21 19:03:46 crc kubenswrapper[4792]: I0121 19:03:46.085973 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dsfvb" event={"ID":"0e66a723-9362-402a-94fa-2eb4b2c0511b","Type":"ContainerStarted","Data":"7537450472cad5f5580870c8fba726bdf6351a31aa233223695ab04df785d9ea"} Jan 21 19:03:47 crc kubenswrapper[4792]: I0121 19:03:47.094351 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dsfvb" event={"ID":"0e66a723-9362-402a-94fa-2eb4b2c0511b","Type":"ContainerStarted","Data":"a6adef9dbdf089617bf9babccf5c3395df4329259de6e67d2c541aaac80e4e6f"} Jan 21 19:03:48 crc kubenswrapper[4792]: I0121 19:03:48.102401 4792 generic.go:334] "Generic (PLEG): container finished" podID="0e66a723-9362-402a-94fa-2eb4b2c0511b" containerID="a6adef9dbdf089617bf9babccf5c3395df4329259de6e67d2c541aaac80e4e6f" exitCode=0 Jan 21 19:03:48 crc kubenswrapper[4792]: I0121 19:03:48.102454 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dsfvb" event={"ID":"0e66a723-9362-402a-94fa-2eb4b2c0511b","Type":"ContainerDied","Data":"a6adef9dbdf089617bf9babccf5c3395df4329259de6e67d2c541aaac80e4e6f"} Jan 21 19:03:50 crc kubenswrapper[4792]: I0121 19:03:50.118980 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-dsfvb" event={"ID":"0e66a723-9362-402a-94fa-2eb4b2c0511b","Type":"ContainerStarted","Data":"d09ce593b43df2090976e6a9a54124f977e836d4dd22fb1344a0120f28152856"} Jan 21 19:03:50 crc kubenswrapper[4792]: I0121 19:03:50.142292 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dsfvb" podStartSLOduration=3.011109547 podStartE2EDuration="6.142263987s" podCreationTimestamp="2026-01-21 19:03:44 +0000 UTC" firstStartedPulling="2026-01-21 19:03:46.087239213 +0000 UTC m=+4060.069202399" lastFinishedPulling="2026-01-21 19:03:49.218393633 +0000 UTC m=+4063.200356839" observedRunningTime="2026-01-21 19:03:50.139465662 +0000 UTC m=+4064.121428838" watchObservedRunningTime="2026-01-21 19:03:50.142263987 +0000 UTC m=+4064.124227173" Jan 21 19:03:55 crc kubenswrapper[4792]: I0121 19:03:55.103552 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dsfvb" Jan 21 19:03:55 crc kubenswrapper[4792]: I0121 19:03:55.104289 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dsfvb" Jan 21 19:03:55 crc kubenswrapper[4792]: I0121 19:03:55.145207 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dsfvb" Jan 21 19:03:55 crc kubenswrapper[4792]: I0121 19:03:55.199775 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dsfvb" Jan 21 19:03:55 crc kubenswrapper[4792]: I0121 19:03:55.381064 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dsfvb"] Jan 21 19:03:57 crc kubenswrapper[4792]: I0121 19:03:57.163470 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dsfvb" podUID="0e66a723-9362-402a-94fa-2eb4b2c0511b" containerName="registry-server" containerID="cri-o://d09ce593b43df2090976e6a9a54124f977e836d4dd22fb1344a0120f28152856" gracePeriod=2 Jan 21 19:03:58 crc kubenswrapper[4792]: I0121 19:03:58.628169 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dsfvb" Jan 21 19:03:58 crc kubenswrapper[4792]: I0121 19:03:58.688709 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e66a723-9362-402a-94fa-2eb4b2c0511b-utilities\") pod \"0e66a723-9362-402a-94fa-2eb4b2c0511b\" (UID: \"0e66a723-9362-402a-94fa-2eb4b2c0511b\") " Jan 21 19:03:58 crc kubenswrapper[4792]: I0121 19:03:58.688816 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e66a723-9362-402a-94fa-2eb4b2c0511b-catalog-content\") pod \"0e66a723-9362-402a-94fa-2eb4b2c0511b\" (UID: \"0e66a723-9362-402a-94fa-2eb4b2c0511b\") " Jan 21 19:03:58 crc kubenswrapper[4792]: I0121 19:03:58.688838 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-skgws\" (UniqueName: \"kubernetes.io/projected/0e66a723-9362-402a-94fa-2eb4b2c0511b-kube-api-access-skgws\") pod \"0e66a723-9362-402a-94fa-2eb4b2c0511b\" (UID: \"0e66a723-9362-402a-94fa-2eb4b2c0511b\") " Jan 21 19:03:58 crc kubenswrapper[4792]: I0121 19:03:58.690189 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e66a723-9362-402a-94fa-2eb4b2c0511b-utilities" (OuterVolumeSpecName: "utilities") pod "0e66a723-9362-402a-94fa-2eb4b2c0511b" (UID: "0e66a723-9362-402a-94fa-2eb4b2c0511b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 19:03:58 crc kubenswrapper[4792]: I0121 19:03:58.701164 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e66a723-9362-402a-94fa-2eb4b2c0511b-kube-api-access-skgws" (OuterVolumeSpecName: "kube-api-access-skgws") pod "0e66a723-9362-402a-94fa-2eb4b2c0511b" (UID: "0e66a723-9362-402a-94fa-2eb4b2c0511b"). InnerVolumeSpecName "kube-api-access-skgws". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 19:03:58 crc kubenswrapper[4792]: I0121 19:03:58.790846 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-skgws\" (UniqueName: \"kubernetes.io/projected/0e66a723-9362-402a-94fa-2eb4b2c0511b-kube-api-access-skgws\") on node \"crc\" DevicePath \"\"" Jan 21 19:03:58 crc kubenswrapper[4792]: I0121 19:03:58.791201 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e66a723-9362-402a-94fa-2eb4b2c0511b-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 19:03:58 crc kubenswrapper[4792]: I0121 19:03:58.822298 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e66a723-9362-402a-94fa-2eb4b2c0511b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0e66a723-9362-402a-94fa-2eb4b2c0511b" (UID: "0e66a723-9362-402a-94fa-2eb4b2c0511b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 19:03:58 crc kubenswrapper[4792]: I0121 19:03:58.892560 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e66a723-9362-402a-94fa-2eb4b2c0511b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 19:03:59 crc kubenswrapper[4792]: I0121 19:03:59.181611 4792 generic.go:334] "Generic (PLEG): container finished" podID="0e66a723-9362-402a-94fa-2eb4b2c0511b" containerID="d09ce593b43df2090976e6a9a54124f977e836d4dd22fb1344a0120f28152856" exitCode=0 Jan 21 19:03:59 crc kubenswrapper[4792]: I0121 19:03:59.181655 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dsfvb" event={"ID":"0e66a723-9362-402a-94fa-2eb4b2c0511b","Type":"ContainerDied","Data":"d09ce593b43df2090976e6a9a54124f977e836d4dd22fb1344a0120f28152856"} Jan 21 19:03:59 crc kubenswrapper[4792]: I0121 19:03:59.181688 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dsfvb" event={"ID":"0e66a723-9362-402a-94fa-2eb4b2c0511b","Type":"ContainerDied","Data":"7537450472cad5f5580870c8fba726bdf6351a31aa233223695ab04df785d9ea"} Jan 21 19:03:59 crc kubenswrapper[4792]: I0121 19:03:59.181688 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dsfvb" Jan 21 19:03:59 crc kubenswrapper[4792]: I0121 19:03:59.181703 4792 scope.go:117] "RemoveContainer" containerID="d09ce593b43df2090976e6a9a54124f977e836d4dd22fb1344a0120f28152856" Jan 21 19:03:59 crc kubenswrapper[4792]: I0121 19:03:59.215895 4792 scope.go:117] "RemoveContainer" containerID="a6adef9dbdf089617bf9babccf5c3395df4329259de6e67d2c541aaac80e4e6f" Jan 21 19:03:59 crc kubenswrapper[4792]: I0121 19:03:59.245497 4792 scope.go:117] "RemoveContainer" containerID="031ae242563a413386fe3e1c907cfb0afdae2442da3bbd2c73216a5e3b8c9d0a" Jan 21 19:03:59 crc kubenswrapper[4792]: I0121 19:03:59.247391 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dsfvb"] Jan 21 19:03:59 crc kubenswrapper[4792]: I0121 19:03:59.261923 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-dsfvb"] Jan 21 19:03:59 crc kubenswrapper[4792]: I0121 19:03:59.276316 4792 scope.go:117] "RemoveContainer" containerID="d09ce593b43df2090976e6a9a54124f977e836d4dd22fb1344a0120f28152856" Jan 21 19:03:59 crc kubenswrapper[4792]: E0121 19:03:59.276692 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d09ce593b43df2090976e6a9a54124f977e836d4dd22fb1344a0120f28152856\": container with ID starting with d09ce593b43df2090976e6a9a54124f977e836d4dd22fb1344a0120f28152856 not found: ID does not exist" containerID="d09ce593b43df2090976e6a9a54124f977e836d4dd22fb1344a0120f28152856" Jan 21 19:03:59 crc kubenswrapper[4792]: I0121 19:03:59.276727 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d09ce593b43df2090976e6a9a54124f977e836d4dd22fb1344a0120f28152856"} err="failed to get container status \"d09ce593b43df2090976e6a9a54124f977e836d4dd22fb1344a0120f28152856\": rpc error: code = NotFound desc = could not find container \"d09ce593b43df2090976e6a9a54124f977e836d4dd22fb1344a0120f28152856\": container with ID starting with d09ce593b43df2090976e6a9a54124f977e836d4dd22fb1344a0120f28152856 not found: ID does not exist" Jan 21 19:03:59 crc 
kubenswrapper[4792]: I0121 19:03:59.276749 4792 scope.go:117] "RemoveContainer" containerID="a6adef9dbdf089617bf9babccf5c3395df4329259de6e67d2c541aaac80e4e6f" Jan 21 19:03:59 crc kubenswrapper[4792]: E0121 19:03:59.276979 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6adef9dbdf089617bf9babccf5c3395df4329259de6e67d2c541aaac80e4e6f\": container with ID starting with a6adef9dbdf089617bf9babccf5c3395df4329259de6e67d2c541aaac80e4e6f not found: ID does not exist" containerID="a6adef9dbdf089617bf9babccf5c3395df4329259de6e67d2c541aaac80e4e6f" Jan 21 19:03:59 crc kubenswrapper[4792]: I0121 19:03:59.277015 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6adef9dbdf089617bf9babccf5c3395df4329259de6e67d2c541aaac80e4e6f"} err="failed to get container status \"a6adef9dbdf089617bf9babccf5c3395df4329259de6e67d2c541aaac80e4e6f\": rpc error: code = NotFound desc = could not find container \"a6adef9dbdf089617bf9babccf5c3395df4329259de6e67d2c541aaac80e4e6f\": container with ID starting with a6adef9dbdf089617bf9babccf5c3395df4329259de6e67d2c541aaac80e4e6f not found: ID does not exist" Jan 21 19:03:59 crc kubenswrapper[4792]: I0121 19:03:59.277029 4792 scope.go:117] "RemoveContainer" containerID="031ae242563a413386fe3e1c907cfb0afdae2442da3bbd2c73216a5e3b8c9d0a" Jan 21 19:03:59 crc kubenswrapper[4792]: E0121 19:03:59.277206 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"031ae242563a413386fe3e1c907cfb0afdae2442da3bbd2c73216a5e3b8c9d0a\": container with ID starting with 031ae242563a413386fe3e1c907cfb0afdae2442da3bbd2c73216a5e3b8c9d0a not found: ID does not exist" containerID="031ae242563a413386fe3e1c907cfb0afdae2442da3bbd2c73216a5e3b8c9d0a" Jan 21 19:03:59 crc kubenswrapper[4792]: I0121 19:03:59.277231 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"031ae242563a413386fe3e1c907cfb0afdae2442da3bbd2c73216a5e3b8c9d0a"} err="failed to get container status \"031ae242563a413386fe3e1c907cfb0afdae2442da3bbd2c73216a5e3b8c9d0a\": rpc error: code = NotFound desc = could not find container \"031ae242563a413386fe3e1c907cfb0afdae2442da3bbd2c73216a5e3b8c9d0a\": container with ID starting with 031ae242563a413386fe3e1c907cfb0afdae2442da3bbd2c73216a5e3b8c9d0a not found: ID does not exist" Jan 21 19:04:00 crc kubenswrapper[4792]: I0121 19:04:00.265642 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e66a723-9362-402a-94fa-2eb4b2c0511b" path="/var/lib/kubelet/pods/0e66a723-9362-402a-94fa-2eb4b2c0511b/volumes" Jan 21 19:04:53 crc kubenswrapper[4792]: I0121 19:04:53.570304 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 19:04:53 crc kubenswrapper[4792]: I0121 19:04:53.571388 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 19:05:23 crc kubenswrapper[4792]: I0121 19:05:23.570269 4792 patch_prober.go:28] interesting 
pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 19:05:23 crc kubenswrapper[4792]: I0121 19:05:23.571318 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 19:05:53 crc kubenswrapper[4792]: I0121 19:05:53.571081 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 19:05:53 crc kubenswrapper[4792]: I0121 19:05:53.572072 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 19:05:53 crc kubenswrapper[4792]: I0121 19:05:53.572136 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" Jan 21 19:05:53 crc kubenswrapper[4792]: I0121 19:05:53.573373 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0864a8dd3645aaaa500158cbb8866641993fcaa83f4d5b1564b32da7e767190d"} pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 19:05:53 crc kubenswrapper[4792]: I0121 19:05:53.573479 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" containerID="cri-o://0864a8dd3645aaaa500158cbb8866641993fcaa83f4d5b1564b32da7e767190d" gracePeriod=600 Jan 21 19:05:54 crc kubenswrapper[4792]: I0121 19:05:54.023695 4792 generic.go:334] "Generic (PLEG): container finished" podID="759f2e21-e44e-4049-b262-cb49448e22ab" containerID="0864a8dd3645aaaa500158cbb8866641993fcaa83f4d5b1564b32da7e767190d" exitCode=0 Jan 21 19:05:54 crc kubenswrapper[4792]: I0121 19:05:54.024114 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerDied","Data":"0864a8dd3645aaaa500158cbb8866641993fcaa83f4d5b1564b32da7e767190d"} Jan 21 19:05:54 crc kubenswrapper[4792]: I0121 19:05:54.024155 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerStarted","Data":"35a61f0a342bea4c1980927e301f1f64d3da7eab599bba55fc99d254ca55fdee"} Jan 21 19:05:54 crc kubenswrapper[4792]: I0121 19:05:54.024179 4792 scope.go:117] "RemoveContainer" containerID="87ff778e8e95e8df3024f43f2b59e384936eb3c8e103ec9b9eb2a8727855c505" Jan 21 19:06:11 
crc kubenswrapper[4792]: I0121 19:06:11.076173 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-skdvq"] Jan 21 19:06:11 crc kubenswrapper[4792]: E0121 19:06:11.077108 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e66a723-9362-402a-94fa-2eb4b2c0511b" containerName="extract-utilities" Jan 21 19:06:11 crc kubenswrapper[4792]: I0121 19:06:11.077126 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e66a723-9362-402a-94fa-2eb4b2c0511b" containerName="extract-utilities" Jan 21 19:06:11 crc kubenswrapper[4792]: E0121 19:06:11.077143 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e66a723-9362-402a-94fa-2eb4b2c0511b" containerName="extract-content" Jan 21 19:06:11 crc kubenswrapper[4792]: I0121 19:06:11.077149 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e66a723-9362-402a-94fa-2eb4b2c0511b" containerName="extract-content" Jan 21 19:06:11 crc kubenswrapper[4792]: E0121 19:06:11.077162 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e66a723-9362-402a-94fa-2eb4b2c0511b" containerName="registry-server" Jan 21 19:06:11 crc kubenswrapper[4792]: I0121 19:06:11.077168 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e66a723-9362-402a-94fa-2eb4b2c0511b" containerName="registry-server" Jan 21 19:06:11 crc kubenswrapper[4792]: I0121 19:06:11.077324 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e66a723-9362-402a-94fa-2eb4b2c0511b" containerName="registry-server" Jan 21 19:06:11 crc kubenswrapper[4792]: I0121 19:06:11.079247 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-skdvq" Jan 21 19:06:11 crc kubenswrapper[4792]: I0121 19:06:11.086228 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-skdvq"] Jan 21 19:06:11 crc kubenswrapper[4792]: I0121 19:06:11.242596 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c29e408-89a5-441d-94ed-5b83336b9255-utilities\") pod \"community-operators-skdvq\" (UID: \"8c29e408-89a5-441d-94ed-5b83336b9255\") " pod="openshift-marketplace/community-operators-skdvq" Jan 21 19:06:11 crc kubenswrapper[4792]: I0121 19:06:11.243006 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sj9wc\" (UniqueName: \"kubernetes.io/projected/8c29e408-89a5-441d-94ed-5b83336b9255-kube-api-access-sj9wc\") pod \"community-operators-skdvq\" (UID: \"8c29e408-89a5-441d-94ed-5b83336b9255\") " pod="openshift-marketplace/community-operators-skdvq" Jan 21 19:06:11 crc kubenswrapper[4792]: I0121 19:06:11.243308 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c29e408-89a5-441d-94ed-5b83336b9255-catalog-content\") pod \"community-operators-skdvq\" (UID: \"8c29e408-89a5-441d-94ed-5b83336b9255\") " pod="openshift-marketplace/community-operators-skdvq" Jan 21 19:06:11 crc kubenswrapper[4792]: I0121 19:06:11.344524 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c29e408-89a5-441d-94ed-5b83336b9255-catalog-content\") pod \"community-operators-skdvq\" (UID: \"8c29e408-89a5-441d-94ed-5b83336b9255\") " 
pod="openshift-marketplace/community-operators-skdvq" Jan 21 19:06:11 crc kubenswrapper[4792]: I0121 19:06:11.345068 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c29e408-89a5-441d-94ed-5b83336b9255-catalog-content\") pod \"community-operators-skdvq\" (UID: \"8c29e408-89a5-441d-94ed-5b83336b9255\") " pod="openshift-marketplace/community-operators-skdvq" Jan 21 19:06:11 crc kubenswrapper[4792]: I0121 19:06:11.345435 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c29e408-89a5-441d-94ed-5b83336b9255-utilities\") pod \"community-operators-skdvq\" (UID: \"8c29e408-89a5-441d-94ed-5b83336b9255\") " pod="openshift-marketplace/community-operators-skdvq" Jan 21 19:06:11 crc kubenswrapper[4792]: I0121 19:06:11.345482 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sj9wc\" (UniqueName: \"kubernetes.io/projected/8c29e408-89a5-441d-94ed-5b83336b9255-kube-api-access-sj9wc\") pod \"community-operators-skdvq\" (UID: \"8c29e408-89a5-441d-94ed-5b83336b9255\") " pod="openshift-marketplace/community-operators-skdvq" Jan 21 19:06:11 crc kubenswrapper[4792]: I0121 19:06:11.346365 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c29e408-89a5-441d-94ed-5b83336b9255-utilities\") pod \"community-operators-skdvq\" (UID: \"8c29e408-89a5-441d-94ed-5b83336b9255\") " pod="openshift-marketplace/community-operators-skdvq" Jan 21 19:06:11 crc kubenswrapper[4792]: I0121 19:06:11.651323 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sj9wc\" (UniqueName: \"kubernetes.io/projected/8c29e408-89a5-441d-94ed-5b83336b9255-kube-api-access-sj9wc\") pod \"community-operators-skdvq\" (UID: \"8c29e408-89a5-441d-94ed-5b83336b9255\") " pod="openshift-marketplace/community-operators-skdvq" Jan 21 19:06:11 crc kubenswrapper[4792]: I0121 19:06:11.702494 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-skdvq" Jan 21 19:06:12 crc kubenswrapper[4792]: I0121 19:06:12.146955 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-skdvq"] Jan 21 19:06:12 crc kubenswrapper[4792]: I0121 19:06:12.163469 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skdvq" event={"ID":"8c29e408-89a5-441d-94ed-5b83336b9255","Type":"ContainerStarted","Data":"e1d97b8da27689047bac60c8345ad4943823e431f8bbe5bc248c42d305a088a3"} Jan 21 19:06:13 crc kubenswrapper[4792]: I0121 19:06:13.173343 4792 generic.go:334] "Generic (PLEG): container finished" podID="8c29e408-89a5-441d-94ed-5b83336b9255" containerID="95cd516cff079dc2b507851d35468998baa3c9d28c48c8b1e811d09ffcd6f41e" exitCode=0 Jan 21 19:06:13 crc kubenswrapper[4792]: I0121 19:06:13.173415 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skdvq" event={"ID":"8c29e408-89a5-441d-94ed-5b83336b9255","Type":"ContainerDied","Data":"95cd516cff079dc2b507851d35468998baa3c9d28c48c8b1e811d09ffcd6f41e"} Jan 21 19:06:13 crc kubenswrapper[4792]: I0121 19:06:13.178263 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 19:06:14 crc kubenswrapper[4792]: I0121 19:06:14.180592 4792 generic.go:334] "Generic (PLEG): container finished" podID="8c29e408-89a5-441d-94ed-5b83336b9255" containerID="7d511e57454548ce395eb0809ad406de70e57a13d49b26bb198f1fa292068115" exitCode=0 Jan 21 19:06:14 crc kubenswrapper[4792]: I0121 19:06:14.180914 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skdvq" event={"ID":"8c29e408-89a5-441d-94ed-5b83336b9255","Type":"ContainerDied","Data":"7d511e57454548ce395eb0809ad406de70e57a13d49b26bb198f1fa292068115"} Jan 21 19:06:15 crc kubenswrapper[4792]: I0121 19:06:15.189389 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skdvq" event={"ID":"8c29e408-89a5-441d-94ed-5b83336b9255","Type":"ContainerStarted","Data":"3b45458ac3021eac7fdb7b45a16d4dd741838da93edf2ca526732881d5726b62"} Jan 21 19:06:15 crc kubenswrapper[4792]: I0121 19:06:15.215908 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-skdvq" podStartSLOduration=2.710160428 podStartE2EDuration="4.21589189s" podCreationTimestamp="2026-01-21 19:06:11 +0000 UTC" firstStartedPulling="2026-01-21 19:06:13.177905569 +0000 UTC m=+4207.159868755" lastFinishedPulling="2026-01-21 19:06:14.683637021 +0000 UTC m=+4208.665600217" observedRunningTime="2026-01-21 19:06:15.21437541 +0000 UTC m=+4209.196338616" watchObservedRunningTime="2026-01-21 19:06:15.21589189 +0000 UTC m=+4209.197855076" Jan 21 19:06:21 crc kubenswrapper[4792]: I0121 19:06:21.703806 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-skdvq" Jan 21 19:06:21 crc kubenswrapper[4792]: I0121 19:06:21.704160 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-skdvq" Jan 21 19:06:21 crc kubenswrapper[4792]: I0121 19:06:21.743098 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-skdvq" Jan 21 19:06:22 crc kubenswrapper[4792]: I0121 19:06:22.283947 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-marketplace/community-operators-skdvq" Jan 21 19:06:22 crc kubenswrapper[4792]: I0121 19:06:22.322924 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-skdvq"] Jan 21 19:06:24 crc kubenswrapper[4792]: I0121 19:06:24.255433 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-skdvq" podUID="8c29e408-89a5-441d-94ed-5b83336b9255" containerName="registry-server" containerID="cri-o://3b45458ac3021eac7fdb7b45a16d4dd741838da93edf2ca526732881d5726b62" gracePeriod=2 Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.108099 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-skdvq" Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.265821 4792 generic.go:334] "Generic (PLEG): container finished" podID="8c29e408-89a5-441d-94ed-5b83336b9255" containerID="3b45458ac3021eac7fdb7b45a16d4dd741838da93edf2ca526732881d5726b62" exitCode=0 Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.265885 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skdvq" event={"ID":"8c29e408-89a5-441d-94ed-5b83336b9255","Type":"ContainerDied","Data":"3b45458ac3021eac7fdb7b45a16d4dd741838da93edf2ca526732881d5726b62"} Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.265924 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skdvq" event={"ID":"8c29e408-89a5-441d-94ed-5b83336b9255","Type":"ContainerDied","Data":"e1d97b8da27689047bac60c8345ad4943823e431f8bbe5bc248c42d305a088a3"} Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.265935 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-skdvq" Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.265959 4792 scope.go:117] "RemoveContainer" containerID="3b45458ac3021eac7fdb7b45a16d4dd741838da93edf2ca526732881d5726b62" Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.274832 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c29e408-89a5-441d-94ed-5b83336b9255-catalog-content\") pod \"8c29e408-89a5-441d-94ed-5b83336b9255\" (UID: \"8c29e408-89a5-441d-94ed-5b83336b9255\") " Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.274958 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c29e408-89a5-441d-94ed-5b83336b9255-utilities\") pod \"8c29e408-89a5-441d-94ed-5b83336b9255\" (UID: \"8c29e408-89a5-441d-94ed-5b83336b9255\") " Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.275004 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sj9wc\" (UniqueName: \"kubernetes.io/projected/8c29e408-89a5-441d-94ed-5b83336b9255-kube-api-access-sj9wc\") pod \"8c29e408-89a5-441d-94ed-5b83336b9255\" (UID: \"8c29e408-89a5-441d-94ed-5b83336b9255\") " Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.276010 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c29e408-89a5-441d-94ed-5b83336b9255-utilities" (OuterVolumeSpecName: "utilities") pod "8c29e408-89a5-441d-94ed-5b83336b9255" (UID: "8c29e408-89a5-441d-94ed-5b83336b9255"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.282651 4792 scope.go:117] "RemoveContainer" containerID="7d511e57454548ce395eb0809ad406de70e57a13d49b26bb198f1fa292068115" Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.282769 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c29e408-89a5-441d-94ed-5b83336b9255-kube-api-access-sj9wc" (OuterVolumeSpecName: "kube-api-access-sj9wc") pod "8c29e408-89a5-441d-94ed-5b83336b9255" (UID: "8c29e408-89a5-441d-94ed-5b83336b9255"). InnerVolumeSpecName "kube-api-access-sj9wc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.312310 4792 scope.go:117] "RemoveContainer" containerID="95cd516cff079dc2b507851d35468998baa3c9d28c48c8b1e811d09ffcd6f41e" Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.332631 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c29e408-89a5-441d-94ed-5b83336b9255-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8c29e408-89a5-441d-94ed-5b83336b9255" (UID: "8c29e408-89a5-441d-94ed-5b83336b9255"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.333607 4792 scope.go:117] "RemoveContainer" containerID="3b45458ac3021eac7fdb7b45a16d4dd741838da93edf2ca526732881d5726b62" Jan 21 19:06:25 crc kubenswrapper[4792]: E0121 19:06:25.334316 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b45458ac3021eac7fdb7b45a16d4dd741838da93edf2ca526732881d5726b62\": container with ID starting with 3b45458ac3021eac7fdb7b45a16d4dd741838da93edf2ca526732881d5726b62 not found: ID does not exist" containerID="3b45458ac3021eac7fdb7b45a16d4dd741838da93edf2ca526732881d5726b62" Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.334353 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b45458ac3021eac7fdb7b45a16d4dd741838da93edf2ca526732881d5726b62"} err="failed to get container status \"3b45458ac3021eac7fdb7b45a16d4dd741838da93edf2ca526732881d5726b62\": rpc error: code = NotFound desc = could not find container \"3b45458ac3021eac7fdb7b45a16d4dd741838da93edf2ca526732881d5726b62\": container with ID starting with 3b45458ac3021eac7fdb7b45a16d4dd741838da93edf2ca526732881d5726b62 not found: ID does not exist" Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.334393 4792 scope.go:117] "RemoveContainer" containerID="7d511e57454548ce395eb0809ad406de70e57a13d49b26bb198f1fa292068115" Jan 21 19:06:25 crc kubenswrapper[4792]: E0121 19:06:25.334691 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d511e57454548ce395eb0809ad406de70e57a13d49b26bb198f1fa292068115\": container with ID starting with 7d511e57454548ce395eb0809ad406de70e57a13d49b26bb198f1fa292068115 not found: ID does not exist" containerID="7d511e57454548ce395eb0809ad406de70e57a13d49b26bb198f1fa292068115" Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.334713 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d511e57454548ce395eb0809ad406de70e57a13d49b26bb198f1fa292068115"} err="failed to get container status \"7d511e57454548ce395eb0809ad406de70e57a13d49b26bb198f1fa292068115\": rpc error: 
code = NotFound desc = could not find container \"7d511e57454548ce395eb0809ad406de70e57a13d49b26bb198f1fa292068115\": container with ID starting with 7d511e57454548ce395eb0809ad406de70e57a13d49b26bb198f1fa292068115 not found: ID does not exist" Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.334726 4792 scope.go:117] "RemoveContainer" containerID="95cd516cff079dc2b507851d35468998baa3c9d28c48c8b1e811d09ffcd6f41e" Jan 21 19:06:25 crc kubenswrapper[4792]: E0121 19:06:25.334995 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95cd516cff079dc2b507851d35468998baa3c9d28c48c8b1e811d09ffcd6f41e\": container with ID starting with 95cd516cff079dc2b507851d35468998baa3c9d28c48c8b1e811d09ffcd6f41e not found: ID does not exist" containerID="95cd516cff079dc2b507851d35468998baa3c9d28c48c8b1e811d09ffcd6f41e" Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.335039 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95cd516cff079dc2b507851d35468998baa3c9d28c48c8b1e811d09ffcd6f41e"} err="failed to get container status \"95cd516cff079dc2b507851d35468998baa3c9d28c48c8b1e811d09ffcd6f41e\": rpc error: code = NotFound desc = could not find container \"95cd516cff079dc2b507851d35468998baa3c9d28c48c8b1e811d09ffcd6f41e\": container with ID starting with 95cd516cff079dc2b507851d35468998baa3c9d28c48c8b1e811d09ffcd6f41e not found: ID does not exist" Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.376408 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c29e408-89a5-441d-94ed-5b83336b9255-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.376457 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c29e408-89a5-441d-94ed-5b83336b9255-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.376468 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sj9wc\" (UniqueName: \"kubernetes.io/projected/8c29e408-89a5-441d-94ed-5b83336b9255-kube-api-access-sj9wc\") on node \"crc\" DevicePath \"\"" Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.598139 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-skdvq"] Jan 21 19:06:25 crc kubenswrapper[4792]: I0121 19:06:25.602695 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-skdvq"] Jan 21 19:06:26 crc kubenswrapper[4792]: I0121 19:06:26.253269 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c29e408-89a5-441d-94ed-5b83336b9255" path="/var/lib/kubelet/pods/8c29e408-89a5-441d-94ed-5b83336b9255/volumes" Jan 21 19:06:44 crc kubenswrapper[4792]: I0121 19:06:44.801916 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-framework-operators-ftqr8"] Jan 21 19:06:44 crc kubenswrapper[4792]: E0121 19:06:44.802741 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c29e408-89a5-441d-94ed-5b83336b9255" containerName="extract-utilities" Jan 21 19:06:44 crc kubenswrapper[4792]: I0121 19:06:44.802761 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c29e408-89a5-441d-94ed-5b83336b9255" containerName="extract-utilities" Jan 21 19:06:44 crc kubenswrapper[4792]: E0121 19:06:44.802776 4792 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c29e408-89a5-441d-94ed-5b83336b9255" containerName="extract-content" Jan 21 19:06:44 crc kubenswrapper[4792]: I0121 19:06:44.802783 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c29e408-89a5-441d-94ed-5b83336b9255" containerName="extract-content" Jan 21 19:06:44 crc kubenswrapper[4792]: E0121 19:06:44.802801 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c29e408-89a5-441d-94ed-5b83336b9255" containerName="registry-server" Jan 21 19:06:44 crc kubenswrapper[4792]: I0121 19:06:44.802806 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c29e408-89a5-441d-94ed-5b83336b9255" containerName="registry-server" Jan 21 19:06:44 crc kubenswrapper[4792]: I0121 19:06:44.802979 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c29e408-89a5-441d-94ed-5b83336b9255" containerName="registry-server" Jan 21 19:06:44 crc kubenswrapper[4792]: I0121 19:06:44.803424 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-ftqr8" Jan 21 19:06:44 crc kubenswrapper[4792]: I0121 19:06:44.822298 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-ftqr8"] Jan 21 19:06:44 crc kubenswrapper[4792]: I0121 19:06:44.915524 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sg49v\" (UniqueName: \"kubernetes.io/projected/03c815e8-23cf-4760-aa14-f204f0552b1d-kube-api-access-sg49v\") pod \"service-telemetry-framework-operators-ftqr8\" (UID: \"03c815e8-23cf-4760-aa14-f204f0552b1d\") " pod="service-telemetry/service-telemetry-framework-operators-ftqr8" Jan 21 19:06:45 crc kubenswrapper[4792]: I0121 19:06:45.017408 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sg49v\" (UniqueName: \"kubernetes.io/projected/03c815e8-23cf-4760-aa14-f204f0552b1d-kube-api-access-sg49v\") pod \"service-telemetry-framework-operators-ftqr8\" (UID: \"03c815e8-23cf-4760-aa14-f204f0552b1d\") " pod="service-telemetry/service-telemetry-framework-operators-ftqr8" Jan 21 19:06:45 crc kubenswrapper[4792]: I0121 19:06:45.035560 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sg49v\" (UniqueName: \"kubernetes.io/projected/03c815e8-23cf-4760-aa14-f204f0552b1d-kube-api-access-sg49v\") pod \"service-telemetry-framework-operators-ftqr8\" (UID: \"03c815e8-23cf-4760-aa14-f204f0552b1d\") " pod="service-telemetry/service-telemetry-framework-operators-ftqr8" Jan 21 19:06:45 crc kubenswrapper[4792]: I0121 19:06:45.123948 4792 util.go:30] "No sandbox for pod can be found. 
Jan 21 19:06:45 crc kubenswrapper[4792]: I0121 19:06:45.123948 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-ftqr8"
Jan 21 19:06:45 crc kubenswrapper[4792]: I0121 19:06:45.361920 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-ftqr8"]
Jan 21 19:06:45 crc kubenswrapper[4792]: I0121 19:06:45.401691 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-ftqr8" event={"ID":"03c815e8-23cf-4760-aa14-f204f0552b1d","Type":"ContainerStarted","Data":"76c59ca69c4f7d1e13ef9c60ebd5a088b7ec63b45bbc20da9b8c490fc96ba15b"}
Jan 21 19:06:46 crc kubenswrapper[4792]: I0121 19:06:46.408686 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-ftqr8" event={"ID":"03c815e8-23cf-4760-aa14-f204f0552b1d","Type":"ContainerStarted","Data":"f8bf9cc44a1325b2bd05f3b0ac8936fe8c1e74f67a546436961bf4267bab62bf"}
Jan 21 19:06:46 crc kubenswrapper[4792]: I0121 19:06:46.427825 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-framework-operators-ftqr8" podStartSLOduration=2.320368528 podStartE2EDuration="2.427806942s" podCreationTimestamp="2026-01-21 19:06:44 +0000 UTC" firstStartedPulling="2026-01-21 19:06:45.379833858 +0000 UTC m=+4239.361797044" lastFinishedPulling="2026-01-21 19:06:45.487272272 +0000 UTC m=+4239.469235458" observedRunningTime="2026-01-21 19:06:46.42550054 +0000 UTC m=+4240.407463736" watchObservedRunningTime="2026-01-21 19:06:46.427806942 +0000 UTC m=+4240.409770128"
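In the "Observed pod startup duration" entry above, the two durations differ by exactly the image-pull window: podStartSLOduration excludes the time between firstStartedPulling and lastFinishedPulling from the end-to-end duration. A quick check of that arithmetic with the timestamps taken from the log; this is a sketch of the relationship, not the tracker's actual code:

// sloduration.go - reproduces the numbers in the entry above:
// SLO duration = end-to-end duration - image pull time.
package main

import (
	"fmt"
	"time"
)

func main() {
	created, _ := time.Parse(time.RFC3339, "2026-01-21T19:06:44Z") // podCreationTimestamp
	firstStartedPulling := created.Add(1*time.Second + 379833858*time.Nanosecond)
	lastFinishedPulling := created.Add(1*time.Second + 487272272*time.Nanosecond)
	watchObservedRunning := created.Add(2*time.Second + 427806942*time.Nanosecond)

	e2e := watchObservedRunning.Sub(created)
	pulling := lastFinishedPulling.Sub(firstStartedPulling)
	slo := e2e - pulling

	fmt.Println("podStartE2EDuration:", e2e)     // 2.427806942s
	fmt.Println("image pull time:    ", pulling) // 107.438414ms
	fmt.Println("podStartSLOduration:", slo)     // 2.320368528s
}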
Jan 21 19:06:55 crc kubenswrapper[4792]: I0121 19:06:55.124896 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/service-telemetry-framework-operators-ftqr8"
Jan 21 19:06:55 crc kubenswrapper[4792]: I0121 19:06:55.125408 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/service-telemetry-framework-operators-ftqr8"
Jan 21 19:06:55 crc kubenswrapper[4792]: I0121 19:06:55.157102 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/service-telemetry-framework-operators-ftqr8"
Jan 21 19:06:55 crc kubenswrapper[4792]: I0121 19:06:55.499691 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/service-telemetry-framework-operators-ftqr8"
Jan 21 19:06:55 crc kubenswrapper[4792]: I0121 19:06:55.561612 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-ftqr8"]
Jan 21 19:06:57 crc kubenswrapper[4792]: I0121 19:06:57.479867 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/service-telemetry-framework-operators-ftqr8" podUID="03c815e8-23cf-4760-aa14-f204f0552b1d" containerName="registry-server" containerID="cri-o://f8bf9cc44a1325b2bd05f3b0ac8936fe8c1e74f67a546436961bf4267bab62bf" gracePeriod=2
Jan 21 19:06:57 crc kubenswrapper[4792]: I0121 19:06:57.833644 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-ftqr8"
Jan 21 19:06:58 crc kubenswrapper[4792]: I0121 19:06:58.011631 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sg49v\" (UniqueName: \"kubernetes.io/projected/03c815e8-23cf-4760-aa14-f204f0552b1d-kube-api-access-sg49v\") pod \"03c815e8-23cf-4760-aa14-f204f0552b1d\" (UID: \"03c815e8-23cf-4760-aa14-f204f0552b1d\") "
Jan 21 19:06:58 crc kubenswrapper[4792]: I0121 19:06:58.018688 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03c815e8-23cf-4760-aa14-f204f0552b1d-kube-api-access-sg49v" (OuterVolumeSpecName: "kube-api-access-sg49v") pod "03c815e8-23cf-4760-aa14-f204f0552b1d" (UID: "03c815e8-23cf-4760-aa14-f204f0552b1d"). InnerVolumeSpecName "kube-api-access-sg49v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 19:06:58 crc kubenswrapper[4792]: I0121 19:06:58.114200 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sg49v\" (UniqueName: \"kubernetes.io/projected/03c815e8-23cf-4760-aa14-f204f0552b1d-kube-api-access-sg49v\") on node \"crc\" DevicePath \"\""
Jan 21 19:06:58 crc kubenswrapper[4792]: I0121 19:06:58.486966 4792 generic.go:334] "Generic (PLEG): container finished" podID="03c815e8-23cf-4760-aa14-f204f0552b1d" containerID="f8bf9cc44a1325b2bd05f3b0ac8936fe8c1e74f67a546436961bf4267bab62bf" exitCode=0
Jan 21 19:06:58 crc kubenswrapper[4792]: I0121 19:06:58.487003 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-operators-ftqr8"
Jan 21 19:06:58 crc kubenswrapper[4792]: I0121 19:06:58.487009 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-ftqr8" event={"ID":"03c815e8-23cf-4760-aa14-f204f0552b1d","Type":"ContainerDied","Data":"f8bf9cc44a1325b2bd05f3b0ac8936fe8c1e74f67a546436961bf4267bab62bf"}
Jan 21 19:06:58 crc kubenswrapper[4792]: I0121 19:06:58.487050 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-operators-ftqr8" event={"ID":"03c815e8-23cf-4760-aa14-f204f0552b1d","Type":"ContainerDied","Data":"76c59ca69c4f7d1e13ef9c60ebd5a088b7ec63b45bbc20da9b8c490fc96ba15b"}
Jan 21 19:06:58 crc kubenswrapper[4792]: I0121 19:06:58.487069 4792 scope.go:117] "RemoveContainer" containerID="f8bf9cc44a1325b2bd05f3b0ac8936fe8c1e74f67a546436961bf4267bab62bf"
Jan 21 19:06:58 crc kubenswrapper[4792]: I0121 19:06:58.507593 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-ftqr8"]
Jan 21 19:06:58 crc kubenswrapper[4792]: I0121 19:06:58.511125 4792 scope.go:117] "RemoveContainer" containerID="f8bf9cc44a1325b2bd05f3b0ac8936fe8c1e74f67a546436961bf4267bab62bf"
Jan 21 19:06:58 crc kubenswrapper[4792]: I0121 19:06:58.517338 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-framework-operators-ftqr8"]
Jan 21 19:06:58 crc kubenswrapper[4792]: E0121 19:06:58.517959 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8bf9cc44a1325b2bd05f3b0ac8936fe8c1e74f67a546436961bf4267bab62bf\": container with ID starting with f8bf9cc44a1325b2bd05f3b0ac8936fe8c1e74f67a546436961bf4267bab62bf not found: ID does not exist" containerID="f8bf9cc44a1325b2bd05f3b0ac8936fe8c1e74f67a546436961bf4267bab62bf"
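"Killing container with a grace period" with gracePeriod=2, as logged at 19:06:57 above, means the runtime delivers SIGTERM and escalates to SIGKILL only if the process outlives the two-second grace window; here registry-server exited 0 before the deadline. A minimal local-process sketch of that pattern; it stands in for the CRI runtime and is not CRI-O's implementation:

// gracekill.go - grace-period termination: SIGTERM first, SIGKILL only
// if the process outlives the grace period.
package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

func killWithGracePeriod(cmd *exec.Cmd, grace time.Duration) {
	_ = cmd.Process.Signal(syscall.SIGTERM) // polite request to exit

	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()

	select {
	case <-done:
		fmt.Println("exited within grace period")
	case <-time.After(grace):
		_ = cmd.Process.Kill() // escalate to SIGKILL
		<-done
		fmt.Println("grace period expired, killed")
	}
}

func main() {
	cmd := exec.Command("sleep", "60")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	killWithGracePeriod(cmd, 2*time.Second) // gracePeriod=2, as in the log
}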
Jan 21 19:06:58 crc kubenswrapper[4792]: I0121 19:06:58.518016 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8bf9cc44a1325b2bd05f3b0ac8936fe8c1e74f67a546436961bf4267bab62bf"} err="failed to get container status \"f8bf9cc44a1325b2bd05f3b0ac8936fe8c1e74f67a546436961bf4267bab62bf\": rpc error: code = NotFound desc = could not find container \"f8bf9cc44a1325b2bd05f3b0ac8936fe8c1e74f67a546436961bf4267bab62bf\": container with ID starting with f8bf9cc44a1325b2bd05f3b0ac8936fe8c1e74f67a546436961bf4267bab62bf not found: ID does not exist"
Jan 21 19:07:00 crc kubenswrapper[4792]: I0121 19:07:00.276164 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03c815e8-23cf-4760-aa14-f204f0552b1d" path="/var/lib/kubelet/pods/03c815e8-23cf-4760-aa14-f204f0552b1d/volumes"
Jan 21 19:07:53 crc kubenswrapper[4792]: I0121 19:07:53.570677 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 21 19:07:53 crc kubenswrapper[4792]: I0121 19:07:53.571237 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 21 19:08:23 crc kubenswrapper[4792]: I0121 19:08:23.571032 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 21 19:08:23 crc kubenswrapper[4792]: I0121 19:08:23.571757 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 21 19:08:53 crc kubenswrapper[4792]: I0121 19:08:53.570444 4792 patch_prober.go:28] interesting pod/machine-config-daemon-m5d6x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 21 19:08:53 crc kubenswrapper[4792]: I0121 19:08:53.570978 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 21 19:08:53 crc kubenswrapper[4792]: I0121 19:08:53.571025 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x"
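The liveness failures above are plain HTTP GETs against http://127.0.0.1:8798/health; a transport error such as "connection refused" counts as a probe failure, and after enough consecutive failures the kubelet marks the container unhealthy and restarts it, which is what happens at 19:08:53. A minimal sketch of a single probe attempt; the threshold bookkeeping and success-status range handling are simplified relative to the kubelet's prober:

// liveness.go - one HTTP liveness check, matching the failures above.
package main

import (
	"fmt"
	"net/http"
	"time"
)

func probeOnce(url string) error {
	client := &http.Client{Timeout: 1 * time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. "dial tcp 127.0.0.1:8798: connect: connection refused"
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unexpected status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probeOnce("http://127.0.0.1:8798/health"); err != nil {
		fmt.Println("Probe failed:", err) // kubelet restarts after N consecutive failures
	} else {
		fmt.Println("Probe succeeded")
	}
}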
Jan 21 19:08:53 crc kubenswrapper[4792]: I0121 19:08:53.571816 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"35a61f0a342bea4c1980927e301f1f64d3da7eab599bba55fc99d254ca55fdee"} pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 21 19:08:53 crc kubenswrapper[4792]: I0121 19:08:53.571906 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab" containerName="machine-config-daemon" containerID="cri-o://35a61f0a342bea4c1980927e301f1f64d3da7eab599bba55fc99d254ca55fdee" gracePeriod=600
Jan 21 19:08:53 crc kubenswrapper[4792]: E0121 19:08:53.695395 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 19:08:54 crc kubenswrapper[4792]: I0121 19:08:54.364672 4792 generic.go:334] "Generic (PLEG): container finished" podID="759f2e21-e44e-4049-b262-cb49448e22ab" containerID="35a61f0a342bea4c1980927e301f1f64d3da7eab599bba55fc99d254ca55fdee" exitCode=0
Jan 21 19:08:54 crc kubenswrapper[4792]: I0121 19:08:54.364716 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" event={"ID":"759f2e21-e44e-4049-b262-cb49448e22ab","Type":"ContainerDied","Data":"35a61f0a342bea4c1980927e301f1f64d3da7eab599bba55fc99d254ca55fdee"}
Jan 21 19:08:54 crc kubenswrapper[4792]: I0121 19:08:54.364747 4792 scope.go:117] "RemoveContainer" containerID="0864a8dd3645aaaa500158cbb8866641993fcaa83f4d5b1564b32da7e767190d"
Jan 21 19:08:54 crc kubenswrapper[4792]: I0121 19:08:54.365269 4792 scope.go:117] "RemoveContainer" containerID="35a61f0a342bea4c1980927e301f1f64d3da7eab599bba55fc99d254ca55fdee"
Jan 21 19:08:54 crc kubenswrapper[4792]: E0121 19:08:54.365457 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
Jan 21 19:09:06 crc kubenswrapper[4792]: I0121 19:09:06.250210 4792 scope.go:117] "RemoveContainer" containerID="35a61f0a342bea4c1980927e301f1f64d3da7eab599bba55fc99d254ca55fdee"
Jan 21 19:09:06 crc kubenswrapper[4792]: E0121 19:09:06.251052 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5d6x_openshift-machine-config-operator(759f2e21-e44e-4049-b262-cb49448e22ab)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5d6x" podUID="759f2e21-e44e-4049-b262-cb49448e22ab"
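"back-off 5m0s restarting failed container" is the kubelet's crash-loop restart backoff at its cap: the delay grows multiplicatively with each failed restart until it reaches five minutes, which is why the pod worker skips syncs at 19:08:54 and again at 19:09:06 instead of restarting immediately. A sketch of doubling-with-cap backoff, assuming a 10s initial delay; the 5m cap appears in the log, while the initial value and reset-after-healthy-run behavior are simplified here:

// crashloop.go - doubling restart back-off capped at 5 minutes.
package main

import (
	"fmt"
	"time"
)

func main() {
	const (
		initial  = 10 * time.Second // assumed starting delay
		maxDelay = 5 * time.Minute  // the log's "back-off 5m0s" cap
	)
	delay := initial
	for restart := 1; restart <= 7; restart++ {
		fmt.Printf("restart %d: wait %v\n", restart, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay // further crashes all wait the full 5m
		}
	}
}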